From dec7875a6e23212021e4d9080330a42832dfe02a Mon Sep 17 00:00:00 2001
From: Edward Rudd <urkle@outoforder.cc>
Date: Tue, 15 Jun 2021 01:40:19 +0000
Subject: [PATCH] update SDL source to 2.0.14

---
 source/src/video/khronos/vulkan/vulkan.hpp | 136121 ++++++++++++++++++++++++++++++++++++++---------------------
 1 file changed, 88332 insertions(+), 47789 deletions(-)

diff --git a/source/src/video/khronos/vulkan/vulkan.hpp b/source/src/video/khronos/vulkan/vulkan.hpp
index bdc7e4d..070664b 100644
--- a/source/src/video/khronos/vulkan/vulkan.hpp
+++ b/source/src/video/khronos/vulkan/vulkan.hpp
@@ -1,32 +1,6 @@
-// Copyright (c) 2015-2018 The Khronos Group Inc.
+// Copyright (c) 2015-2020 The Khronos Group Inc.
 // 
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// 
-//     http://www.apache.org/licenses/LICENSE-2.0
-// 
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// 
-// ---- Exceptions to the Apache 2.0 License: ----
-// 
-// As an exception, if you use this Software to generate code and portions of
-// this Software are embedded into the generated code as a result, you may
-// redistribute such product without providing attribution as would otherwise
-// be required by Sections 4(a), 4(b) and 4(d) of the License.
-// 
-// In addition, if you combine or link code generated by this Software with
-// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
-// ("`Combined Software`") and if a court of competent jurisdiction determines
-// that the patent provision (Section 3), the indemnity provision (Section 9)
-// or other Section of the License conflicts with the conditions of the
-// applicable GPL or LGPL license, you may retroactively and prospectively
-// choose to deem waived or otherwise exclude such Section(s) of the License,
-// but only in their entirety and only with respect to the Combined Software.
+// SPDX-License-Identifier: Apache-2.0 OR MIT
 //     
 
 // This header is generated from the Khronos Vulkan XML API Registry.
@@ -34,24 +8,99 @@
 #ifndef VULKAN_HPP
 #define VULKAN_HPP
 
+#if defined( _MSVC_LANG )
+#  define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
+#else
+#  define VULKAN_HPP_CPLUSPLUS __cplusplus
+#endif
+
+#if 201703L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 20
+#elif 201402L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 17
+#elif 201103L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 14
+#elif 199711L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 11
+#else
+#  error "vulkan.hpp needs at least c++ standard version 11"
+#endif
+
 #include <algorithm>
 #include <array>
 #include <cstddef>
 #include <cstdint>
 #include <cstring>
+#include <functional>
 #include <initializer_list>
 #include <string>
 #include <system_error>
 #include <tuple>
 #include <type_traits>
 #include <vulkan/vulkan.h>
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+
+#if 17 <= VULKAN_HPP_CPP_VERSION
+#include <string_view>
+#endif
+
+#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+# if !defined(VULKAN_HPP_NO_SMART_HANDLE)
+#  define VULKAN_HPP_NO_SMART_HANDLE
+# endif
+#else
 # include <memory>
 # include <vector>
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif
+
 #if !defined(VULKAN_HPP_ASSERT)
 # include <cassert>
 # define VULKAN_HPP_ASSERT   assert
+#endif
+
+#if !defined(VULKAN_HPP_ASSERT_ON_RESULT)
+# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
+#endif
+
+#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL)
+# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
+#  if defined( __linux__ ) || defined( __APPLE__ )
+#    include <dlfcn.h>
+#  elif defined( _WIN32 )
+typedef struct HINSTANCE__ * HINSTANCE;
+#    if defined( _WIN64 )
+typedef int64_t( __stdcall * FARPROC )();
+#    else
+typedef int( __stdcall * FARPROC )();
+#    endif
+extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
+extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
+extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName );
+#  endif
+#endif
+
+#if !defined(__has_include)
+# define __has_include(x) false
+#endif
+
+#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( <compare> )
+# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
+#endif
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+# include <compare>
+#endif
+
+
+static_assert( VK_HEADER_VERSION ==  158 , "Wrong VK_HEADER_VERSION!" );
+
+// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#  define VULKAN_HPP_TYPESAFE_CONVERSION
+# endif
 #endif
 
 // <tuple> includes <sys/sysmacros.h> through some other header
@@ -65,19 +114,9 @@
 #endif
 
 // Windows defines MemoryBarrier which is deprecated and collides
-// with the vk::MemoryBarrier struct.
-#ifdef MemoryBarrier
+// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
+#if defined(MemoryBarrier)
   #undef MemoryBarrier
-#endif
-
-static_assert( VK_HEADER_VERSION ==  91 , "Wrong VK_HEADER_VERSION!" );
-
-// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
-// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
-#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
-# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
-#  define VULKAN_HPP_TYPESAFE_CONVERSION
-# endif
 #endif
 
 #if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
@@ -98,11 +137,11 @@
 #endif
 
 #if !defined(VULKAN_HPP_INLINE)
-# if defined(__clang___)
+# if defined(__clang__)
 #  if __has_attribute(always_inline)
 #   define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
 #  else
-#    define VULKAN_HPP_INLINE inline
+#   define VULKAN_HPP_INLINE inline
 #  endif
 # elif defined(__GNUC__)
 #  define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
@@ -119,12 +158,51 @@
 # define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
 #endif
 
-#if defined(_MSC_VER) && (_MSC_VER <= 1800)
-# define VULKAN_HPP_CONSTEXPR
-#else
+#if defined(__cpp_constexpr)
 # define VULKAN_HPP_CONSTEXPR constexpr
+# if __cpp_constexpr >= 201304
+#  define VULKAN_HPP_CONSTEXPR_14  constexpr
+# else
+#  define VULKAN_HPP_CONSTEXPR_14
+# endif
+# define VULKAN_HPP_CONST_OR_CONSTEXPR  constexpr
+#else
+# define VULKAN_HPP_CONSTEXPR
+# define VULKAN_HPP_CONSTEXPR_14
+# define VULKAN_HPP_CONST_OR_CONSTEXPR  const
 #endif
 
+#if !defined(VULKAN_HPP_NOEXCEPT)
+# if defined(_MSC_VER) && (_MSC_VER <= 1800)
+#  define VULKAN_HPP_NOEXCEPT
+# else
+#  define VULKAN_HPP_NOEXCEPT noexcept
+#  define VULKAN_HPP_HAS_NOEXCEPT 1
+#  if defined(VULKAN_HPP_NO_EXCEPTIONS)
+#    define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
+#  else
+#    define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+#  endif
+# endif
+#endif
+
+#if 14 <= VULKAN_HPP_CPP_VERSION
+#  define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
+#else
+#  define VULKAN_HPP_DEPRECATED( msg )
+#endif
+
+#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
+#  define VULKAN_HPP_NODISCARD [[nodiscard]]
+#  if defined(VULKAN_HPP_NO_EXCEPTIONS)
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
+#  else
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#  endif
+#else
+#  define VULKAN_HPP_NODISCARD
+#  define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#endif
 
 #if !defined(VULKAN_HPP_NAMESPACE)
 #define VULKAN_HPP_NAMESPACE vk
@@ -137,108 +215,575 @@
 namespace VULKAN_HPP_NAMESPACE
 {
 
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+  template <typename T>
+  class ArrayProxy
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    ArrayProxy( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <size_t N>
+    ArrayProxy( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <size_t N>
+    ArrayProxy( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxy( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+              typename B      = T,
+              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxy( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+              typename B      = T,
+              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    const T * begin() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *m_ptr;
+    }
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *( m_ptr + m_count - 1 );
+    }
+
+    bool empty() const VULKAN_HPP_NOEXCEPT
+    {
+      return ( m_count == 0 );
+    }
+
+    uint32_t size() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_count;
+    }
+
+    T * data() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t m_count;
+    T *      m_ptr;
+  };
+
+  template <typename T>
+  class ArrayProxyNoTemporaries
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) VULKAN_HPP_NOEXCEPT = delete;
+    ArrayProxyNoTemporaries( std::initializer_list<T> && list ) VULKAN_HPP_NOEXCEPT       = delete;
+
+    template <size_t N>
+    ArrayProxyNoTemporaries( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <size_t N>
+    ArrayProxyNoTemporaries( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( N )
+      , m_ptr( data.data() )
+    {}
+
+    template <size_t N>
+    ArrayProxyNoTemporaries( std::array<T, N> const && data ) VULKAN_HPP_NOEXCEPT = delete;
+    template <size_t N>
+    ArrayProxyNoTemporaries( std::array<T, N> && data ) VULKAN_HPP_NOEXCEPT       = delete;
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxyNoTemporaries( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+              typename B      = T,
+              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxyNoTemporaries( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
+              typename B      = T,
+              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( data.size() ) )
+      , m_ptr( data.data() )
+    {}
+
+    ArrayProxyNoTemporaries( std::vector<T> const && data ) VULKAN_HPP_NOEXCEPT = delete;
+    ArrayProxyNoTemporaries( std::vector<T> && data ) VULKAN_HPP_NOEXCEPT       = delete;
+
+    const T * begin() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *m_ptr;
+    }
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *( m_ptr + m_count - 1 );
+    }
+
+    bool empty() const VULKAN_HPP_NOEXCEPT
+    {
+      return ( m_count == 0 );
+    }
+
+    uint32_t size() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_count;
+    }
+
+    T * data() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t m_count;
+    T *      m_ptr;
+  };
+#endif
+
+  template <typename T, size_t N>
+  class ArrayWrapper1D : public std::array<T,N>
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT
+      : std::array<T, N>()
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D(std::array<T,N> const& data) VULKAN_HPP_NOEXCEPT
+      : std::array<T, N>(data)
+    {}
+
+#if defined(_WIN32) && !defined(_WIN64)
+    VULKAN_HPP_CONSTEXPR T const& operator[](int index) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::array<T, N>::operator[](index);
+    }
+
+    VULKAN_HPP_CONSTEXPR T & operator[](int index) VULKAN_HPP_NOEXCEPT
+    {
+      return std::array<T, N>::operator[](index);
+    }
+#endif
+
+    operator T const* () const VULKAN_HPP_NOEXCEPT
+    {
+      return this->data();
+    }
+
+    operator T * () VULKAN_HPP_NOEXCEPT
+    {
+      return this->data();
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    operator std::string() const
+    {
+      return std::string( this->data() );
+    }
+
+#if 17 <= VULKAN_HPP_CPP_VERSION
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    operator std::string_view() const
+    {
+      return std::string_view( this->data() );
+    }
+#endif
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator>=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator==( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator!=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
+    }
+  };
+
+  // specialization of relational operators between std::string and arrays of chars
+  template <size_t N>
+  bool operator<(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs < rhs.data();
+  }
+
+  template <size_t N>
+  bool operator<=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs <= rhs.data();
+  }
+
+  template <size_t N>
+  bool operator>(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs > rhs.data();
+  }
+
+  template <size_t N>
+  bool operator>=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs >= rhs.data();
+  }
+
+  template <size_t N>
+  bool operator==(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs == rhs.data();
+  }
+
+  template <size_t N>
+  bool operator!=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs != rhs.data();
+  }
+
+  template <typename T, size_t N, size_t M>
+  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T,M>,N>
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT
+      : std::array<ArrayWrapper1D<T,M>, N>()
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D(std::array<std::array<T,M>,N> const& data) VULKAN_HPP_NOEXCEPT
+      : std::array<ArrayWrapper1D<T,M>, N>(*reinterpret_cast<std::array<ArrayWrapper1D<T,M>,N> const*>(&data))
+    {}
+  };
+
   template <typename FlagBitsType> struct FlagTraits
   {
     enum { allFlags = 0 };
   };
 
-  template <typename BitType, typename MaskType = VkFlags>
+  template <typename BitType>
   class Flags
   {
   public:
-    VULKAN_HPP_CONSTEXPR Flags()
+    using MaskType = typename std::underlying_type<BitType>::type;
+
+    // constructors
+    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
       : m_mask(0)
-    {
-    }
+    {}
 
-    Flags(BitType bit)
+    VULKAN_HPP_CONSTEXPR Flags(BitType bit) VULKAN_HPP_NOEXCEPT
       : m_mask(static_cast<MaskType>(bit))
-    {
-    }
+    {}
 
-    Flags(Flags<BitType> const& rhs)
+    VULKAN_HPP_CONSTEXPR Flags(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
       : m_mask(rhs.m_mask)
-    {
-    }
+    {}
 
-    explicit Flags(MaskType flags)
+    VULKAN_HPP_CONSTEXPR explicit Flags(MaskType flags) VULKAN_HPP_NOEXCEPT
       : m_mask(flags)
+    {}
+
+    // relational operators
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>(Flags<BitType> const&) const = default;
+#else
+    VULKAN_HPP_CONSTEXPR bool operator<(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
     {
+      return m_mask < rhs.m_mask;
     }
 
-    Flags<BitType> & operator=(Flags<BitType> const& rhs)
+    VULKAN_HPP_CONSTEXPR bool operator<=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask <= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask > rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask >= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask == rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask != rhs.m_mask;
+    }
+#endif
+
+    // logical operator
+    VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return !m_mask;
+    }
+
+    // bitwise operators
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask & rhs.m_mask);
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask | rhs.m_mask);
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask ^ rhs.m_mask);
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask ^ FlagTraits<BitType>::allFlags);
+    }
+
+    // assignment operators
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
     {
       m_mask = rhs.m_mask;
       return *this;
     }
 
-    Flags<BitType> & operator|=(Flags<BitType> const& rhs)
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
     {
       m_mask |= rhs.m_mask;
       return *this;
     }
 
-    Flags<BitType> & operator&=(Flags<BitType> const& rhs)
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
     {
       m_mask &= rhs.m_mask;
       return *this;
     }
 
-    Flags<BitType> & operator^=(Flags<BitType> const& rhs)
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
     {
       m_mask ^= rhs.m_mask;
       return *this;
     }
 
-    Flags<BitType> operator|(Flags<BitType> const& rhs) const
-    {
-      Flags<BitType> result(*this);
-      result |= rhs;
-      return result;
-    }
-
-    Flags<BitType> operator&(Flags<BitType> const& rhs) const
-    {
-      Flags<BitType> result(*this);
-      result &= rhs;
-      return result;
-    }
-
-    Flags<BitType> operator^(Flags<BitType> const& rhs) const
-    {
-      Flags<BitType> result(*this);
-      result ^= rhs;
-      return result;
-    }
-
-    bool operator!() const
-    {
-      return !m_mask;
-    }
-
-    Flags<BitType> operator~() const
-    {
-      Flags<BitType> result(*this);
-      result.m_mask ^= FlagTraits<BitType>::allFlags;
-      return result;
-    }
-
-    bool operator==(Flags<BitType> const& rhs) const
-    {
-      return m_mask == rhs.m_mask;
-    }
-
-    bool operator!=(Flags<BitType> const& rhs) const
-    {
-      return m_mask != rhs.m_mask;
-    }
-
-    explicit operator bool() const
+    // cast operators
+    explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
     {
       return !!m_mask;
     }
 
-    explicit operator MaskType() const
+    explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
     {
         return m_mask;
     }
@@ -247,135 +792,305 @@
     MaskType  m_mask;
   };
 
+#if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+  // relational operators only needed for pre C++20
   template <typename BitType>
-  Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags)
+  VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
   {
-    return flags | bit;
+    return flags.operator>( bit );
   }
 
   template <typename BitType>
-  Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags)
+  VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
   {
-    return flags & bit;
+    return flags.operator>=( bit );
   }
 
   template <typename BitType>
-  Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags)
+  VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
   {
-    return flags ^ bit;
+    return flags.operator<( bit );
   }
 
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator<=(bit);
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator==( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator!=( bit );
+  }
+#endif
+
+  // bitwise operators
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator&( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator|( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator^( bit );
+  }
 
   template <typename RefType>
   class Optional
   {
   public:
-    Optional(RefType & reference) { m_ptr = &reference; }
-    Optional(RefType * ptr) { m_ptr = ptr; }
-    Optional(std::nullptr_t) { m_ptr = nullptr; }
+    Optional(RefType & reference) VULKAN_HPP_NOEXCEPT { m_ptr = &reference; }
+    Optional(RefType * ptr) VULKAN_HPP_NOEXCEPT { m_ptr = ptr; }
+    Optional(std::nullptr_t) VULKAN_HPP_NOEXCEPT { m_ptr = nullptr; }
 
-    operator RefType*() const { return m_ptr; }
-    RefType const* operator->() const { return m_ptr; }
-    explicit operator bool() const { return !!m_ptr; }
+    operator RefType*() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
+    RefType const* operator->() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT { return !!m_ptr; }
 
   private:
     RefType *m_ptr;
   };
 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T>
-  class ArrayProxy
+  template <typename X, typename Y> struct StructExtends { enum { value = false }; };
+
+  template<typename Type, class...>
+  struct IsPartOfStructureChain
+  {
+    static const bool valid = false;
+  };
+
+  template<typename Type, typename Head, typename... Tail>
+  struct IsPartOfStructureChain<Type, Head, Tail...>
+  {
+    static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid;
+  };
+
+  template <size_t Index, typename T, typename... ChainElements>
+  struct StructureChainContains
+  {
+    static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
+                              StructureChainContains<Index - 1, T, ChainElements...>::value;
+  };
+
+  template <typename T, typename... ChainElements>
+  struct StructureChainContains<0, T, ChainElements...>
+  {
+    static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
+  };
+
+  template <size_t Index, typename... ChainElements>
+  struct StructureChainValidation
+  {
+    using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
+    static const bool valid =
+      StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
+      ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
+      StructureChainValidation<Index - 1, ChainElements...>::valid;
+  };
+
+  template <typename... ChainElements>
+  struct StructureChainValidation<0, ChainElements...>
+  {
+    static const bool valid = true;
+  };
+
+  template <typename... ChainElements>
+  class StructureChain : public std::tuple<ChainElements...>
   {
   public:
-    VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t)
-      : m_count(0)
-      , m_ptr(nullptr)
-    {}
-
-    ArrayProxy(T & ptr)
-      : m_count(1)
-      , m_ptr(&ptr)
-    {}
-
-    ArrayProxy(uint32_t count, T * ptr)
-      : m_count(count)
-      , m_ptr(ptr)
-    {}
-
-    template <size_t N>
-    ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data)
-      : m_count(N)
-      , m_ptr(data.data())
-    {}
-
-    template <size_t N>
-    ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data)
-      : m_count(N)
-      , m_ptr(data.data())
-    {}
-
-    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
-    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data)
-      : m_count(static_cast<uint32_t>(data.size()))
-      , m_ptr(data.data())
-    {}
-
-    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
-    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data)
-      : m_count(static_cast<uint32_t>(data.size()))
-      , m_ptr(data.data())
-    {}
-
-    ArrayProxy(std::initializer_list<T> const& data)
-      : m_count(static_cast<uint32_t>(data.end() - data.begin()))
-      , m_ptr(data.begin())
-    {}
-
-    const T * begin() const
+    StructureChain() VULKAN_HPP_NOEXCEPT
     {
-      return m_ptr;
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
     }
 
-    const T * end() const
+    StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
     {
-      return m_ptr + m_count;
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
     }
 
-    const T & front() const
+    StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT
+      : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
     {
-      VULKAN_HPP_ASSERT(m_count && m_ptr);
-      return *m_ptr;
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
     }
 
-    const T & back() const
+    StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
     {
-      VULKAN_HPP_ASSERT(m_count && m_ptr);
-      return *(m_ptr + m_count - 1);
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
     }
 
-    bool empty() const
+    StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return (m_count == 0);
+      std::tuple<ChainElements...>::operator=( rhs );
+      link<sizeof...( ChainElements ) - 1>();
+      return *this;
     }
 
-    uint32_t size() const
+    StructureChain & operator=( StructureChain && rhs ) = delete;
+
+    template <typename T, size_t Which = 0>
+    T & get() VULKAN_HPP_NOEXCEPT
     {
-      return m_count;
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...>&>( *this ) );
     }
 
-    T * data() const
+    template <typename T, size_t Which = 0>
+    T const & get() const VULKAN_HPP_NOEXCEPT
     {
-      return m_ptr;
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> const &>( *this ) );
+    }
+
+    template <typename T0, typename T1, typename... Ts>
+    std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
+    }
+
+    template <typename T0, typename T1, typename... Ts>
+    std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
+    }
+
+    template <typename ClassType, size_t Which = 0>
+    void relink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't relink Structure that's not part of this StructureChain!" );
+      static_assert(
+        !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || (Which != 0),
+        "It's not allowed to have the first element unlinked!" );
+
+      auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
+      VULKAN_HPP_ASSERT( !isLinked( pNext ) );
+      auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...>&>( *this ) );
+      pNext->pNext       = reinterpret_cast<VkBaseInStructure const*>(headElement.pNext);
+      headElement.pNext  = pNext;
+    }
+
+    template <typename ClassType, size_t Which = 0>
+    void unlink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't unlink Structure that's not part of this StructureChain!" );
+      static_assert(
+        !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || (Which != 0),
+        "It's not allowed to unlink the first element!" );
+
+      unlink<sizeof...( ChainElements ) - 1>( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
     }
 
   private:
-    uint32_t  m_count;
-    T *       m_ptr;
+    template <int Index, typename T, int Which, typename, class First, class... Types>
+    struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
+    {};
+
+    template <int Index, typename T, int Which, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             Which,
+                             typename std::enable_if<!std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : ChainElementIndex<Index + 1, T, Which, void, Types...>
+    {};
+
+    template <int Index, typename T, int Which, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             Which,
+                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
+    {};
+
+    template <int Index, typename T, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             0,
+                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : std::integral_constant<int, Index>
+    {};
+
+    bool isLinked( VkBaseInStructure const * pNext )
+    {
+      VkBaseInStructure const * elementPtr = reinterpret_cast<VkBaseInStructure const*>(&std::get<0>( static_cast<std::tuple<ChainElements...>&>( *this ) ) );
+      while ( elementPtr )
+      {
+        if ( elementPtr->pNext == pNext )
+        {
+          return true;
+        }
+        elementPtr = elementPtr->pNext;
+      }
+      return false;
+    }
+
+    template <size_t Index>
+    typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT
+    {
+      auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...>&>( *this ) );
+      x.pNext  = &std::get<Index>( static_cast<std::tuple<ChainElements...>&>( *this ) );
+      link<Index - 1>();
+    }
+
+    template <size_t Index>
+    typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
+    {}
+
+    template <size_t Index>
+    typename std::enable_if<Index != 0, void>::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
+    {
+      auto & element = std::get<Index>( static_cast<std::tuple<ChainElements...>&>( *this ) );
+      if ( element.pNext == pNext )
+      {
+        element.pNext = pNext->pNext;
+      }
+      else
+      {
+        unlink<Index - 1>( pNext );
+      }
+    }
+
+    template <size_t Index>
+    typename std::enable_if<Index == 0, void>::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
+    {
+      auto & element = std::get<0>( static_cast<std::tuple<ChainElements...>&>( *this ) );
+      if ( element.pNext == pNext )
+      {
+        element.pNext = pNext->pNext;
+      }
+      else
+      {
+        VULKAN_HPP_ASSERT( false );  // fires, if the ClassType member has already been unlinked !
+      }
+    }
   };
-#endif
 
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-
+#if !defined(VULKAN_HPP_NO_SMART_HANDLE)
   template <typename Type, typename Dispatch> class UniqueHandleTraits;
 
   template <typename Type, typename Dispatch>
@@ -383,69 +1098,77 @@
   {
   private:
     using Deleter = typename UniqueHandleTraits<Type,Dispatch>::deleter;
+
   public:
-    explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() )
+    using element_type = Type;
+
+    UniqueHandle()
+      : Deleter()
+      , m_value()
+    {}
+
+    explicit UniqueHandle( Type const& value, Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
       : Deleter( deleter)
       , m_value( value )
     {}
 
     UniqueHandle( UniqueHandle const& ) = delete;
 
-    UniqueHandle( UniqueHandle && other )
+    UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
       : Deleter( std::move( static_cast<Deleter&>( other ) ) )
       , m_value( other.release() )
     {}
 
-    ~UniqueHandle()
+    ~UniqueHandle() VULKAN_HPP_NOEXCEPT
     {
       if ( m_value ) this->destroy( m_value );
     }
 
     UniqueHandle & operator=( UniqueHandle const& ) = delete;
 
-    UniqueHandle & operator=( UniqueHandle && other )
+    UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
     {
       reset( other.release() );
       *static_cast<Deleter*>(this) = std::move( static_cast<Deleter&>(other) );
       return *this;
     }
 
-    explicit operator bool() const
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
       return m_value.operator bool();
     }
 
-    Type const* operator->() const
+    Type const* operator->() const VULKAN_HPP_NOEXCEPT
     {
       return &m_value;
     }
 
-    Type * operator->()
+    Type * operator->() VULKAN_HPP_NOEXCEPT
     {
       return &m_value;
     }
 
-    Type const& operator*() const
+    Type const& operator*() const VULKAN_HPP_NOEXCEPT
     {
       return m_value;
     }
 
-    Type & operator*()
+    Type & operator*() VULKAN_HPP_NOEXCEPT
     {
       return m_value;
     }
 
-    const Type & get() const
-    {
-      return m_value;
-    }
-    
-    Type & get()
+    const Type & get() const VULKAN_HPP_NOEXCEPT
     {
       return m_value;
     }
 
-    void reset( Type const& value = Type() )
+    Type & get() VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    void reset( Type const& value = Type() ) VULKAN_HPP_NOEXCEPT
     {
       if ( m_value != value )
       {
@@ -454,14 +1177,14 @@
       }
     }
 
-    Type release()
+    Type release() VULKAN_HPP_NOEXCEPT
     {
       Type value = m_value;
       m_value = nullptr;
       return value;
     }
 
-    void swap( UniqueHandle<Type,Dispatch> & rhs )
+    void swap( UniqueHandle<Type,Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
     {
       std::swap(m_value, rhs.m_value);
       std::swap(static_cast<Deleter&>(*this), static_cast<Deleter&>(rhs));
@@ -471,2042 +1194,2402 @@
     Type    m_value;
   };
 
+  template <typename UniqueType>
+  VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw(std::vector<UniqueType> const& handles)
+  {
+    std::vector<typename UniqueType::element_type> newBuffer(handles.size());
+    std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const& handle) { return handle.get(); });
+    return newBuffer;
+  }
+
   template <typename Type, typename Dispatch>
-  VULKAN_HPP_INLINE void swap( UniqueHandle<Type,Dispatch> & lhs, UniqueHandle<Type,Dispatch> & rhs )
+  VULKAN_HPP_INLINE void swap( UniqueHandle<Type,Dispatch> & lhs, UniqueHandle<Type,Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
   {
     lhs.swap( rhs );
   }
 #endif
 
-
-
-  template <typename X, typename Y> struct isStructureChainValid { enum { value = false }; };
-
-  template <typename P, typename T>
-  struct TypeList
-  {
-    using list = P;
-    using last = T;
-  };
-
-  template <typename List, typename X>
-  struct extendCheck
-  {
-    static const bool valid = isStructureChainValid<typename List::last, X>::value || extendCheck<typename List::list,X>::valid;
-  };
-
-  template <typename T, typename X>
-  struct extendCheck<TypeList<void,T>,X>
-  {
-    static const bool valid = isStructureChainValid<T, X>::value;
-  };
-
-  template <typename X>
-  struct extendCheck<void,X>
-  {
-    static const bool valid = true;
-  };
-
-  template <class Element>
-  class StructureChainElement
+#if !defined(VK_NO_PROTOTYPES)
+  class DispatchLoaderStatic
   {
   public:
-    explicit operator Element&() { return value; }
-    explicit operator const Element&() const { return value; }
-  private:
-    Element value;
-  };
-
-  template<typename ...StructureElements>
-  class StructureChain : private StructureChainElement<StructureElements>...
-  {
-  public:
-    StructureChain()
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
     {
-      link<void, StructureElements...>();  
+      return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
     }
 
-    StructureChain(StructureChain const &rhs)
+    VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
     {
-      linkAndCopy<void, StructureElements...>(rhs);
+      return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
     }
 
-    StructureChain(StructureElements const &... elems)
+    VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT
     {
-      linkAndCopyElements<void, StructureElements...>(elems...);
+      return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
     }
 
-    StructureChain& operator=(StructureChain const &rhs)
+    VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      linkAndCopy<void, StructureElements...>(rhs);
-      return *this;
+      return ::vkAcquireProfilingLockKHR( device, pInfo );
     }
 
-    template<typename ClassType> ClassType& get() { return static_cast<ClassType&>(*this);}
-
-  private:
-    template<typename List, typename X>
-    void link()
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
     {
-      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
+      return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
     }
 
-    template<typename List, typename X, typename Y, typename ...Z>
-    void link()
+    VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
     {
-      static_assert(extendCheck<List,X>::valid, "The structure chain is not valid!");
-      X& x = static_cast<X&>(*this);
-      Y& y = static_cast<Y&>(*this);
-      x.pNext = &y;
-      link<TypeList<List, X>, Y, Z...>();
+      return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
     }
 
-    template<typename List, typename X>
-    void linkAndCopy(StructureChain const &rhs)
+    VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT
     {
-      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
-      static_cast<X&>(*this) = static_cast<X const &>(rhs);
+      return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
     }
 
-    template<typename List, typename X, typename Y, typename ...Z>
-    void linkAndCopy(StructureChain const &rhs)
+    VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
-      X& x = static_cast<X&>(*this);
-      Y& y = static_cast<Y&>(*this);
-      x = static_cast<X const &>(rhs);
-      x.pNext = &y;
-      linkAndCopy<TypeList<List, X>, Y, Z...>(rhs);
+      return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
     }
 
-    template<typename List, typename X>
-    void linkAndCopyElements(X const &xelem)
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkBindAccelerationStructureMemoryKHR( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT
     {
-      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
-      static_cast<X&>(*this) = xelem;
+      return ::vkBindAccelerationStructureMemoryKHR( device, bindInfoCount, pBindInfos );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
     }
 
-    template<typename List, typename X, typename Y, typename ...Z>
-    void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... zelem)
+    VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
     {
-      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
-      X& x = static_cast<X&>(*this);
-      Y& y = static_cast<Y&>(*this);
-      x = xelem;
-      x.pNext = &y;
-      linkAndCopyElements<TypeList<List, X>, Y, Z...>(yelem, zelem...);
+      return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
     }
-  };
 
-  enum class Result
-  {
-    eSuccess = VK_SUCCESS,
-    eNotReady = VK_NOT_READY,
-    eTimeout = VK_TIMEOUT,
-    eEventSet = VK_EVENT_SET,
-    eEventReset = VK_EVENT_RESET,
-    eIncomplete = VK_INCOMPLETE,
-    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
-    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
-    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
-    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
-    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
-    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
-    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
-    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
-    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
-    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
-    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
-    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
-    eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
-    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY,
-    eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
-    eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
-    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
-    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
-    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
-    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
-    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
-    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
-    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
-    eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
-    eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
-    eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT
-  };
-
-  VULKAN_HPP_INLINE std::string to_string(Result value)
-  {
-    switch (value)
+    VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
     {
-    case Result::eSuccess: return "Success";
-    case Result::eNotReady: return "NotReady";
-    case Result::eTimeout: return "Timeout";
-    case Result::eEventSet: return "EventSet";
-    case Result::eEventReset: return "EventReset";
-    case Result::eIncomplete: return "Incomplete";
-    case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
-    case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
-    case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
-    case Result::eErrorDeviceLost: return "ErrorDeviceLost";
-    case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
-    case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
-    case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
-    case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
-    case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
-    case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
-    case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
-    case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
-    case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory";
-    case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle";
-    case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
-    case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
-    case Result::eSuboptimalKHR: return "SuboptimalKHR";
-    case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
-    case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
-    case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
-    case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
-    case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
-    case Result::eErrorFragmentationEXT: return "ErrorFragmentationEXT";
-    case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT";
-    default: return "invalid";
+      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
     }
-  }
 
-#ifndef VULKAN_HPP_NO_EXCEPTIONS
-#if defined(_MSC_VER) && (_MSC_VER == 1800)
-# define noexcept _NOEXCEPT
-#endif
-
-  class ErrorCategoryImpl : public std::error_category
-  {
-    public:
-    virtual const char* name() const noexcept override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
-    virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
-  };
-
-#if defined(_MSC_VER) && (_MSC_VER == 1800)
-# undef noexcept
-#endif
-
-  VULKAN_HPP_INLINE const std::error_category& errorCategory()
-  {
-    static ErrorCategoryImpl instance;
-    return instance;
-  }
-
-  VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
-  {
-    return std::error_code(static_cast<int>(e), errorCategory());
-  }
-
-  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
-  {
-    return std::error_condition(static_cast<int>(e), errorCategory());
-  }
-
-#if defined(_MSC_VER) && (_MSC_VER == 1800)
-# define noexcept _NOEXCEPT
-#endif
-
-  class Error
-  {
-    public:
-    virtual ~Error() = default;
-
-    virtual const char* what() const noexcept = 0;
-  };
-
-  class LogicError : public Error, public std::logic_error
-  {
-    public:
-    explicit LogicError( const std::string& what )
-      : Error(), std::logic_error(what) {}
-    explicit LogicError( char const * what )
-      : Error(), std::logic_error(what) {}
-    virtual ~LogicError() = default;
-
-    virtual const char* what() const noexcept { return std::logic_error::what(); }
-  };
-
-  class SystemError : public Error, public std::system_error
-  {
-    public:
-    SystemError( std::error_code ec )
-      : Error(), std::system_error(ec) {}
-    SystemError( std::error_code ec, std::string const& what )
-      : Error(), std::system_error(ec, what) {}
-    SystemError( std::error_code ec, char const * what )
-      : Error(), std::system_error(ec, what) {}
-    SystemError( int ev, std::error_category const& ecat )
-      : Error(), std::system_error(ev, ecat) {}
-    SystemError( int ev, std::error_category const& ecat, std::string const& what)
-      : Error(), std::system_error(ev, ecat, what) {}
-    SystemError( int ev, std::error_category const& ecat, char const * what)
-      : Error(), std::system_error(ev, ecat, what) {}
-    virtual ~SystemError() = default;
-
-    virtual const char* what() const noexcept { return std::system_error::what(); }
-  };
-
-#if defined(_MSC_VER) && (_MSC_VER == 1800)
-# undef noexcept
-#endif
-
-  class OutOfHostMemoryError : public SystemError
-  {
-  public:
-    OutOfHostMemoryError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
-    OutOfHostMemoryError( char const * message )
-      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
-  };
-  class OutOfDeviceMemoryError : public SystemError
-  {
-  public:
-    OutOfDeviceMemoryError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
-    OutOfDeviceMemoryError( char const * message )
-      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
-  };
-  class InitializationFailedError : public SystemError
-  {
-  public:
-    InitializationFailedError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
-    InitializationFailedError( char const * message )
-      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
-  };
-  class DeviceLostError : public SystemError
-  {
-  public:
-    DeviceLostError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
-    DeviceLostError( char const * message )
-      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
-  };
-  class MemoryMapFailedError : public SystemError
-  {
-  public:
-    MemoryMapFailedError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
-    MemoryMapFailedError( char const * message )
-      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
-  };
-  class LayerNotPresentError : public SystemError
-  {
-  public:
-    LayerNotPresentError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
-    LayerNotPresentError( char const * message )
-      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
-  };
-  class ExtensionNotPresentError : public SystemError
-  {
-  public:
-    ExtensionNotPresentError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
-    ExtensionNotPresentError( char const * message )
-      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
-  };
-  class FeatureNotPresentError : public SystemError
-  {
-  public:
-    FeatureNotPresentError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
-    FeatureNotPresentError( char const * message )
-      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
-  };
-  class IncompatibleDriverError : public SystemError
-  {
-  public:
-    IncompatibleDriverError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
-    IncompatibleDriverError( char const * message )
-      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
-  };
-  class TooManyObjectsError : public SystemError
-  {
-  public:
-    TooManyObjectsError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
-    TooManyObjectsError( char const * message )
-      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
-  };
-  class FormatNotSupportedError : public SystemError
-  {
-  public:
-    FormatNotSupportedError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
-    FormatNotSupportedError( char const * message )
-      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
-  };
-  class FragmentedPoolError : public SystemError
-  {
-  public:
-    FragmentedPoolError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
-    FragmentedPoolError( char const * message )
-      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
-  };
-  class OutOfPoolMemoryError : public SystemError
-  {
-  public:
-    OutOfPoolMemoryError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
-    OutOfPoolMemoryError( char const * message )
-      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
-  };
-  class InvalidExternalHandleError : public SystemError
-  {
-  public:
-    InvalidExternalHandleError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
-    InvalidExternalHandleError( char const * message )
-      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
-  };
-  class SurfaceLostKHRError : public SystemError
-  {
-  public:
-    SurfaceLostKHRError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
-    SurfaceLostKHRError( char const * message )
-      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
-  };
-  class NativeWindowInUseKHRError : public SystemError
-  {
-  public:
-    NativeWindowInUseKHRError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
-    NativeWindowInUseKHRError( char const * message )
-      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
-  };
-  class OutOfDateKHRError : public SystemError
-  {
-  public:
-    OutOfDateKHRError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
-    OutOfDateKHRError( char const * message )
-      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
-  };
-  class IncompatibleDisplayKHRError : public SystemError
-  {
-  public:
-    IncompatibleDisplayKHRError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
-    IncompatibleDisplayKHRError( char const * message )
-      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
-  };
-  class ValidationFailedEXTError : public SystemError
-  {
-  public:
-    ValidationFailedEXTError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
-    ValidationFailedEXTError( char const * message )
-      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
-  };
-  class InvalidShaderNVError : public SystemError
-  {
-  public:
-    InvalidShaderNVError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
-    InvalidShaderNVError( char const * message )
-      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
-  };
-  class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
-  {
-  public:
-    InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
-    InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
-      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
-  };
-  class FragmentationEXTError : public SystemError
-  {
-  public:
-    FragmentationEXTError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
-    FragmentationEXTError( char const * message )
-      : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
-  };
-  class NotPermittedEXTError : public SystemError
-  {
-  public:
-    NotPermittedEXTError( std::string const& message )
-      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
-    NotPermittedEXTError( char const * message )
-      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
-  };
-
-  VULKAN_HPP_INLINE void throwResultException( Result result, char const * message )
-  {
-    switch ( result )
+    VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
     {
-    case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError ( message );
-    case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError ( message );
-    case Result::eErrorInitializationFailed: throw InitializationFailedError ( message );
-    case Result::eErrorDeviceLost: throw DeviceLostError ( message );
-    case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError ( message );
-    case Result::eErrorLayerNotPresent: throw LayerNotPresentError ( message );
-    case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError ( message );
-    case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError ( message );
-    case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError ( message );
-    case Result::eErrorTooManyObjects: throw TooManyObjectsError ( message );
-    case Result::eErrorFormatNotSupported: throw FormatNotSupportedError ( message );
-    case Result::eErrorFragmentedPool: throw FragmentedPoolError ( message );
-    case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError ( message );
-    case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError ( message );
-    case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError ( message );
-    case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError ( message );
-    case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError ( message );
-    case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError ( message );
-    case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError ( message );
-    case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError ( message );
-    case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError ( message );
-    case Result::eErrorFragmentationEXT: throw FragmentationEXTError ( message );
-    case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError ( message );
-    default: throw SystemError( make_error_code( result ) );
+      return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
     }
-  }
-#endif
-} // namespace VULKAN_HPP_NAMESPACE
 
-namespace std
-{
-  template <>
-  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
-  {};
-}
-
-namespace VULKAN_HPP_NAMESPACE
-{
-
-  template <typename T>
-  struct ResultValue
-  {
-    ResultValue( Result r, T & v )
-      : result( r )
-      , value( v )
-    {}
-
-    ResultValue( Result r, T && v )
-      : result( r )
-      , value( std::move( v ) )
-    {}
-
-    Result  result;
-    T       value;
-
-    operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
-  };
-
-  template <typename T>
-  struct ResultValueType
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    typedef ResultValue<T>  type;
-#else
-    typedef T               type;
-#endif
-  };
-
-  template <>
-  struct ResultValueType<void>
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    typedef Result type;
-#else
-    typedef void   type;
-#endif
-  };
-
-  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( result == Result::eSuccess );
-    return result;
-#else
-    if ( result != Result::eSuccess )
+    VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
     {
-      throwResultException( result, message );
+      return ::vkBindImageMemory( device, image, memory, memoryOffset );
     }
-#endif
-  }
 
-  template <typename T>
-  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( result == Result::eSuccess );
-    return ResultValue<T>( result, data );
-#else
-    if ( result != Result::eSuccess )
+    VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
     {
-      throwResultException( result, message );
+      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
     }
-    return std::move( data );
-#endif
-  }
 
-  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
-#else
-    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
     {
-      throwResultException( result, message );
+      return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
     }
-#endif
-    return result;
-  }
 
-  template <typename T>
-  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
-#else
-    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkBuildAccelerationStructureKHR( VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT
     {
-      throwResultException( result, message );
+      return ::vkBuildAccelerationStructureKHR( device, infoCount, pInfos, ppOffsetInfos );
     }
-#endif
-    return ResultValue<T>( result, data );
-  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template <typename T, typename D>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T,D>>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits<T,D>::deleter const& deleter )
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( result == Result::eSuccess );
-    return ResultValue<UniqueHandle<T,D>>( result, UniqueHandle<T,D>(data, deleter) );
-#else
-    if ( result != Result::eSuccess )
+    void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
     {
-      throwResultException( result, message );
+      return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
     }
-    return UniqueHandle<T,D>(data, deleter);
-#endif
-  }
-#endif
 
-class DispatchLoaderStatic
-{
-public:
-  VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex  ) const
-  {
-    return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex);
-  }
-  VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex  ) const
-  {
-    return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex);
-  }
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
-  VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display  ) const
-  {
-    return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display);
-  }
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
-  VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers  ) const
-  {
-    return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers);
-  }
-  VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets  ) const
-  {
-    return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets);
-  }
-  VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory  ) const
-  {
-    return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory);
-  }
-  VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo  ) const
-  {
-    return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo);
-  }
-  VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos  ) const
-  {
-    return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos);
-  }
-  VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset  ) const
-  {
-    return ::vkBindBufferMemory( device, buffer, memory, memoryOffset);
-  }
-  VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos  ) const
-  {
-    return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos);
-  }
-  VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos  ) const
-  {
-    return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos);
-  }
-  VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset  ) const
-  {
-    return ::vkBindImageMemory( device, image, memory, memoryOffset);
-  }
-  VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos  ) const
-  {
-    return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos);
-  }
-  VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos  ) const
-  {
-    return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos);
-  }
-  void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin  ) const
-  {
-    return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin);
-  }
-  void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo  ) const
-  {
-    return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo);
-  }
-  void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags  ) const
-  {
-    return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags);
-  }
-  void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index  ) const
-  {
-    return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index);
-  }
-  void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents  ) const
-  {
-    return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents);
-  }
-  void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo  ) const
-  {
-    return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
-  }
-  void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets  ) const
-  {
-    return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
-  }
-  void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets  ) const
-  {
-    return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
-  }
-  void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType  ) const
-  {
-    return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType);
-  }
-  void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline  ) const
-  {
-    return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline);
-  }
-  void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout  ) const
-  {
-    return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout);
-  }
-  void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes  ) const
-  {
-    return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
-  }
-  void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets  ) const
-  {
-    return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
-  }
-  void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter  ) const
-  {
-    return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
-  }
-  void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset  ) const
-  {
-    return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
-  }
-  void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects  ) const
-  {
-    return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
-  }
-  void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges  ) const
-  {
-    return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
-  }
-  void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges  ) const
-  {
-    return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
-  }
-  void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode  ) const
-  {
-    return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode);
-  }
-  void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions  ) const
-  {
-    return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
-  }
-  void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions  ) const
-  {
-    return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
-  }
-  void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions  ) const
-  {
-    return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
-  }
-  void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions  ) const
-  {
-    return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
-  }
-  void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags  ) const
-  {
-    return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
-  }
-  void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo  ) const
-  {
-    return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo);
-  }
-  void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer  ) const
-  {
-    return ::vkCmdDebugMarkerEndEXT( commandBuffer);
-  }
-  void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo  ) const
-  {
-    return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo);
-  }
-  void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ  ) const
-  {
-    return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ);
-  }
-  void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ  ) const
-  {
-    return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
-  }
-  void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ  ) const
-  {
-    return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
-  }
-  void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset  ) const
-  {
-    return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset);
-  }
-  void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance  ) const
-  {
-    return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
-  }
-  void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance  ) const
-  {
-    return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
-  }
-  void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride);
-  }
-  void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-  }
-  void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-  }
-  void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride);
-  }
-  void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride  ) const
-  {
-    return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
-  }
-  void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-  }
-  void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-  }
-  void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-  }
-  void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride  ) const
-  {
-    return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride);
-  }
-  void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask  ) const
-  {
-    return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask);
-  }
-  void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer  ) const
-  {
-    return ::vkCmdEndConditionalRenderingEXT( commandBuffer);
-  }
-  void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer  ) const
-  {
-    return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer);
-  }
-  void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query  ) const
-  {
-    return ::vkCmdEndQuery( commandBuffer, queryPool, query);
-  }
-  void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index  ) const
-  {
-    return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index);
-  }
-  void vkCmdEndRenderPass( VkCommandBuffer commandBuffer  ) const
-  {
-    return ::vkCmdEndRenderPass( commandBuffer);
-  }
-  void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo  ) const
-  {
-    return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo);
-  }
-  void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets  ) const
-  {
-    return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
-  }
-  void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers  ) const
-  {
-    return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers);
-  }
-  void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data  ) const
-  {
-    return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data);
-  }
-  void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo  ) const
-  {
-    return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo);
-  }
-  void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents  ) const
-  {
-    return ::vkCmdNextSubpass( commandBuffer, contents);
-  }
-  void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo  ) const
-  {
-    return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
-  }
-  void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers  ) const
-  {
-    return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
-  }
-  void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo  ) const
-  {
-    return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo);
-  }
-  void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues  ) const
-  {
-    return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues);
-  }
-  void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites  ) const
-  {
-    return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
-  }
-  void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData  ) const
-  {
-    return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData);
-  }
-  void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo  ) const
-  {
-    return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo);
-  }
-  void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask  ) const
-  {
-    return ::vkCmdResetEvent( commandBuffer, event, stageMask);
-  }
-  void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount  ) const
-  {
-    return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount);
-  }
-  void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions  ) const
-  {
-    return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
-  }
-  void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4]  ) const
-  {
-    return ::vkCmdSetBlendConstants( commandBuffer, blendConstants);
-  }
-  void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker  ) const
-  {
-    return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker);
-  }
-  void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders  ) const
-  {
-    return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
-  }
-  void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor  ) const
-  {
-    return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
-  }
-  void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds  ) const
-  {
-    return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds);
-  }
-  void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask  ) const
-  {
-    return ::vkCmdSetDeviceMask( commandBuffer, deviceMask);
-  }
-  void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask  ) const
-  {
-    return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask);
-  }
-  void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles  ) const
-  {
-    return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
-  }
-  void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask  ) const
-  {
-    return ::vkCmdSetEvent( commandBuffer, event, stageMask);
-  }
-  void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors  ) const
-  {
-    return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
-  }
-  void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth  ) const
-  {
-    return ::vkCmdSetLineWidth( commandBuffer, lineWidth);
-  }
-  void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo  ) const
-  {
-    return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo);
-  }
-  void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors  ) const
-  {
-    return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors);
-  }
-  void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask  ) const
-  {
-    return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask);
-  }
-  void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference  ) const
-  {
-    return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference);
-  }
-  void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask  ) const
-  {
-    return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask);
-  }
-  void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports  ) const
-  {
-    return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports);
-  }
-  void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes  ) const
-  {
-    return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
-  }
-  void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings  ) const
-  {
-    return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings);
-  }
-  void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth  ) const
-  {
-    return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
-  }
-  void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData  ) const
-  {
-    return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData);
-  }
-  void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers  ) const
-  {
-    return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
-  }
-  void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery  ) const
-  {
-    return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
-  }
-  void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker  ) const
-  {
-    return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
-  }
-  void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query  ) const
-  {
-    return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query);
-  }
-  VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader  ) const
-  {
-    return ::vkCompileDeferredNV( device, pipeline, shader);
-  }
-  VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure  ) const
-  {
-    return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure);
-  }
+    void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+    void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
+    }
+
+    void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
+    }
+
+    void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
+    }
+
+    void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }
+
+    void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }
+
+    void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+    void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
+    }
+
+    void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
+    }
+
+    void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
+    }
+
+    void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex );
+    }
+
+    void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
+    }
+
+    void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
+    }
+
+    void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
+    }
+
+    void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+    }
+
+    void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
+    }
+
+    void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdBuildAccelerationStructureIndirectKHR( VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructureIndirectKHR( commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdBuildAccelerationStructureKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructureKHR( commandBuffer, infoCount, pInfos, ppOffsetInfos );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
+    }
+
+    void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
+    }
+
+    void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
+    }
+
+    void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
+    }
+
+    void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo );
+    }
+
+    void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+    void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo );
+    }
+
+    void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+    void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo );
+    }
+
+    void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
+    }
+
+    void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
+    }
+
+    void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
+    }
+
+    void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerEndEXT( commandBuffer );
+    }
+
+    void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
+    }
+
+    void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
+    }
+
+    void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+    void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+    void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
+    }
+
+    void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+    }
+
+    void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+    }
+
+    void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+    void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+    void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
+    }
+
+    void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+    void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
+    }
+
+    void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndConditionalRenderingEXT( commandBuffer );
+    }
+
+    void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
+    }
+
+    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
+    }
+
+    void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
+    }
+
+    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass( commandBuffer );
+    }
+
+    void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
+    }
+
+    void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
+    }
+
+    void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+    void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
+    }
+
+    void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
+    }
+
+    void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
+    }
+
+    void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass( commandBuffer, contents );
+    }
+
+    void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+    void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+    void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
+    }
+
+    void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
+    }
+
+    void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
+    }
+
+    void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
+    }
+
+    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
+    }
+
+    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
+    }
+
+    void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+    void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
+    }
+
+    void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
+    }
+
+    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
+    }
+
+    void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
+    }
+
+    void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCullModeEXT( commandBuffer, cullMode );
+    }
+
+    void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    }
+
+    void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
+    }
+
+    void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
+    }
+
+    void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
+    }
+
+    void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
+    }
+
+    void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
+    }
+
+    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
+    }
+
+    void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
+    }
+
+    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
+    }
+
+    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
+    }
+
+    void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
+    }
+
+    void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps );
+    }
+
+    void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
+    }
+
+    void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
+    }
+
+    void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
+    }
+
+    VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+    VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
+    }
+
+    VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+    void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
+    }
+
+    void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
+    }
+
+    void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
+    }
+
+    void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
+    }
+
+    void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
+    }
+
+    void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
+    }
+
+    void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
+    }
+
+    void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable );
+    }
+
+    void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
+    }
+
+    void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
+    }
+
+    void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
+    }
+
+    void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
+    }
+
+    void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, buffer, offset );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
+    }
+
+    void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
+    }
+
+    void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+
+    void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
+    }
+
+    void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
+    }
+
+    VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCompileDeferredNV( device, pipeline, shader );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkCopyAccelerationStructureKHR( VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyAccelerationStructureKHR( device, pInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyAccelerationStructureToMemoryKHR( device, pInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMemoryToAccelerationStructureKHR( device, pInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
-  VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-  VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer  ) const
-  {
-    return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer);
-  }
-  VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView  ) const
-  {
-    return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView);
-  }
-  VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool  ) const
-  {
-    return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool);
-  }
-  VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines  ) const
-  {
-    return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
-  }
-  VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback  ) const
-  {
-    return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback);
-  }
-  VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger  ) const
-  {
-    return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger);
-  }
-  VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool  ) const
-  {
-    return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool);
-  }
-  VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout  ) const
-  {
-    return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout);
-  }
-  VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate  ) const
-  {
-    return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
-  }
-  VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate  ) const
-  {
-    return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
-  }
-  VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice  ) const
-  {
-    return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice);
-  }
-  VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode  ) const
-  {
-    return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode);
-  }
-  VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
-  }
-  VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent  ) const
-  {
-    return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent);
-  }
-  VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence  ) const
-  {
-    return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence);
-  }
-  VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer  ) const
-  {
-    return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer);
-  }
-  VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines  ) const
-  {
-    return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
-  }
+
+    VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
+    }
+
+    VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
+    }
+
+    VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
+    }
+
+    VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
+    }
+
+    VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
+    }
+
+    VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
+    }
+
+    VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+    VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+    VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
+    }
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+    VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+    VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
+    }
+
+    VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+    VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
+    }
+
+    VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
+    }
+
+    VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
+    }
+
+    VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
 #ifdef VK_USE_PLATFORM_IOS_MVK
-  VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_IOS_MVK*/
-  VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage  ) const
-  {
-    return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage);
-  }
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-  VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface);
-  }
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-  VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView  ) const
-  {
-    return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView);
-  }
-  VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout  ) const
-  {
-    return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
-  }
-  VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance  ) const
-  {
-    return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance);
-  }
+
+    VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
+    }
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+    VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
+    }
+
+    VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
+    }
+
+    VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
+    }
+
 #ifdef VK_USE_PLATFORM_MACOS_MVK
-  VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_MACOS_MVK*/
-  VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable  ) const
-  {
-    return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable);
-  }
-  VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache  ) const
-  {
-    return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache);
-  }
-  VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout  ) const
-  {
-    return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout);
-  }
-  VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool  ) const
-  {
-    return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool);
-  }
-  VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines  ) const
-  {
-    return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
-  }
-  VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass  ) const
-  {
-    return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass);
-  }
-  VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass  ) const
-  {
-    return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass);
-  }
-  VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler  ) const
-  {
-    return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler);
-  }
-  VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion  ) const
-  {
-    return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion);
-  }
-  VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion  ) const
-  {
-    return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion);
-  }
-  VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore  ) const
-  {
-    return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore);
-  }
-  VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule  ) const
-  {
-    return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule);
-  }
-  VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains  ) const
-  {
-    return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
-  }
-  VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain  ) const
-  {
-    return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain);
-  }
-  VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache  ) const
-  {
-    return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache);
-  }
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+    VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
+    }
+
+    VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
+    }
+
+    VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot );
+    }
+
+    VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesKHR( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+    VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+    VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+    VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
+    }
+
+    VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+    VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+    VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
+    }
+
+    VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
+    }
+
+    VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
+    }
+
+#ifdef VK_USE_PLATFORM_GGP
+    VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+    VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
+    }
+
+    VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
+    }
+
 #ifdef VK_USE_PLATFORM_VI_NN
-  VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_VI_NN*/
+
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
 #ifdef VK_USE_PLATFORM_XCB_KHR
-  VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
+
 #ifdef VK_USE_PLATFORM_XLIB_KHR
-  VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface  ) const
-  {
-    return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface);
-  }
+    VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
-  VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo  ) const
-  {
-    return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo);
-  }
-  VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo  ) const
-  {
-    return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo);
-  }
-  void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage  ) const
-  {
-    return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
-  }
-  void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator);
-  }
-  void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyBuffer( device, buffer, pAllocator);
-  }
-  void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyBufferView( device, bufferView, pAllocator);
-  }
-  void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyCommandPool( device, commandPool, pAllocator);
-  }
-  void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator);
-  }
-  void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator);
-  }
-  void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator);
-  }
-  void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator);
-  }
-  void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator);
-  }
-  void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator);
-  }
-  void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyDevice( device, pAllocator);
-  }
-  void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyEvent( device, event, pAllocator);
-  }
-  void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyFence( device, fence, pAllocator);
-  }
-  void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyFramebuffer( device, framebuffer, pAllocator);
-  }
-  void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyImage( device, image, pAllocator);
-  }
-  void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyImageView( device, imageView, pAllocator);
-  }
-  void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator);
-  }
-  void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyInstance( instance, pAllocator);
-  }
-  void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator);
-  }
-  void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyPipeline( device, pipeline, pAllocator);
-  }
-  void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator);
-  }
-  void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator);
-  }
-  void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyQueryPool( device, queryPool, pAllocator);
-  }
-  void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyRenderPass( device, renderPass, pAllocator);
-  }
-  void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroySampler( device, sampler, pAllocator);
-  }
-  void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator);
-  }
-  void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator);
-  }
-  void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroySemaphore( device, semaphore, pAllocator);
-  }
-  void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyShaderModule( device, shaderModule, pAllocator);
-  }
-  void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroySurfaceKHR( instance, surface, pAllocator);
-  }
-  void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroySwapchainKHR( device, swapchain, pAllocator);
-  }
-  void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator);
-  }
-  VkResult vkDeviceWaitIdle( VkDevice device  ) const
-  {
-    return ::vkDeviceWaitIdle( device);
-  }
-  VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo  ) const
-  {
-    return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo);
-  }
-  VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer  ) const
-  {
-    return ::vkEndCommandBuffer( commandBuffer);
-  }
-  VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties  ) const
-  {
-    return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties);
-  }
-  VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties  ) const
-  {
-    return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties);
-  }
-  VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties  ) const
-  {
-    return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties);
-  }
-  VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties  ) const
-  {
-    return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties);
-  }
-  VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion  ) const
-  {
-    return ::vkEnumerateInstanceVersion( pApiVersion);
-  }
-  VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties  ) const
-  {
-    return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
-  }
-  VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties  ) const
-  {
-    return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
-  }
-  VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices  ) const
-  {
-    return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices);
-  }
-  VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges  ) const
-  {
-    return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges);
-  }
-  void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers  ) const
-  {
-    return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers);
-  }
-  VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets  ) const
-  {
-    return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets);
-  }
-  void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator  ) const
-  {
-    return ::vkFreeMemory( device, memory, pAllocator);
-  }
-  VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData  ) const
-  {
-    return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData);
-  }
-  void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements  ) const
-  {
-    return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements);
-  }
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties  ) const
-  {
-    return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties);
-  }
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-  void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements  ) const
-  {
-    return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements);
-  }
-  void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements  ) const
-  {
-    return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements);
-  }
-  void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements  ) const
-  {
-    return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements);
-  }
-  VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation  ) const
-  {
-    return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
-  }
-  void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport  ) const
-  {
-    return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport);
-  }
-  void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport  ) const
-  {
-    return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport);
-  }
-  void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures  ) const
-  {
-    return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
-  }
-  void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures  ) const
-  {
-    return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
-  }
-  VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities  ) const
-  {
-    return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities);
-  }
-  VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes  ) const
-  {
-    return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes);
-  }
-  void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes  ) const
-  {
-    return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes);
-  }
-  PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName  ) const
-  {
-    return ::vkGetDeviceProcAddr( device, pName);
-  }
-  void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue  ) const
-  {
-    return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue);
-  }
-  void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue  ) const
-  {
-    return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue);
-  }
-  VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties  ) const
-  {
-    return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties);
-  }
-  VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties  ) const
-  {
-    return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties);
-  }
-  VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities  ) const
-  {
-    return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities);
-  }
-  VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities  ) const
-  {
-    return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities);
-  }
-  VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays  ) const
-  {
-    return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays);
-  }
-  VkResult vkGetEventStatus( VkDevice device, VkEvent event  ) const
-  {
-    return ::vkGetEventStatus( device, event);
-  }
-  VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd  ) const
-  {
-    return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd);
-  }
-  VkResult vkGetFenceStatus( VkDevice device, VkFence fence  ) const
-  {
-    return ::vkGetFenceStatus( device, fence);
-  }
+
+    VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
+    }
+
+    VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
+    }
+
+    void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeferredOperationJoinKHR( device, operation );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
+    }
+
+    void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBuffer( device, buffer, pAllocator );
+    }
+
+    void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBufferView( device, bufferView, pAllocator );
+    }
+
+    void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCommandPool( device, commandPool, pAllocator );
+    }
+
+    void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
+    }
+
+    void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
+    }
+
+    void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
+    }
+
+    void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+    void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+    void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDevice( device, pAllocator );
+    }
+
+    void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyEvent( device, event, pAllocator );
+    }
+
+    void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFence( device, fence, pAllocator );
+    }
+
+    void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
+    }
+
+    void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImage( device, image, pAllocator );
+    }
+
+    void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImageView( device, imageView, pAllocator );
+    }
+
+    void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator );
+    }
+
+    void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyInstance( instance, pAllocator );
+    }
+
+    void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipeline( device, pipeline, pAllocator );
+    }
+
+    void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
+    }
+
+    void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
+    }
+
+    void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator );
+    }
+
+    void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyQueryPool( device, queryPool, pAllocator );
+    }
+
+    void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyRenderPass( device, renderPass, pAllocator );
+    }
+
+    void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySampler( device, sampler, pAllocator );
+    }
+
+    void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
+    }
+
+    void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
+    }
+
+    void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySemaphore( device, semaphore, pAllocator );
+    }
+
+    void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
+    }
+
+    void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+    }
+
+    void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
+    }
+
+    void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
+    }
+
+    VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeviceWaitIdle( device );
+    }
+
+    VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
+    }
+
+    VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEndCommandBuffer( commandBuffer );
+    }
+
+    VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceVersion( pApiVersion );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
+    }
+
+    VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
+    }
+
+    VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+    void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
+    }
+
+    VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
+    }
+
+    void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeMemory( device, memory, pAllocator );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    void vkGetAccelerationStructureMemoryRequirementsKHR( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddress( device, pInfo );
+    }
+
+    VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressEXT( device, pInfo );
+    }
+
+    VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressKHR( device, pInfo );
+    }
+
+    void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
+    }
+
+    void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+    uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
+    }
+
+    uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+    VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeferredOperationResultKHR( device, operation );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
+    }
+
+    void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionKHR* version ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, version );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+    void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+    VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
+    }
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle  ) const
-  {
-    return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle);
-  }
+    VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties  ) const
-  {
-    return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties);
-  }
-  void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements  ) const
-  {
-    return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements);
-  }
-  void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements  ) const
-  {
-    return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements);
-  }
-  void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements  ) const
-  {
-    return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements);
-  }
-  void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements  ) const
-  {
-    return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
-  }
-  void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements  ) const
-  {
-    return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
-  }
-  void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements  ) const
-  {
-    return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
-  }
-  void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout  ) const
-  {
-    return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout);
-  }
-  PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName  ) const
-  {
-    return ::vkGetInstanceProcAddr( instance, pName);
-  }
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer  ) const
-  {
-    return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer);
-  }
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-  VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd  ) const
-  {
-    return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd);
-  }
-  VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties  ) const
-  {
-    return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties);
-  }
-  VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties  ) const
-  {
-    return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties);
-  }
+
+    VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
+    }
+
+    void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
+    }
+
+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
+    }
+
+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+    PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceProcAddr( device, pName );
+    }
+
+    void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
+    }
+
+    void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
+    }
+
+    VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
+    }
+
+    VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+    }
+
+    VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
+    }
+
+    VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetEventStatus( device, event );
+    }
+
+    VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
+    }
+
+    VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceStatus( device, fence );
+    }
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle  ) const
-  {
-    return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle);
-  }
+    VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle  ) const
-  {
-    return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle);
-  }
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
+
+    void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+    VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
+    }
+
+    void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
+    }
+
+    void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
+    }
+
+    VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
+    }
+
+    uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewHandleNVX( device, pInfo );
+    }
+
+    PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetInstanceProcAddr( instance, pName );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
+    }
+
+    VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
+    }
+
+    VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
+    }
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties  ) const
-  {
-    return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties);
-  }
+    VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings  ) const
-  {
-    return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings);
-  }
-  VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains  ) const
-  {
-    return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains);
-  }
-  VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties);
-  }
-  VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties);
-  }
-  VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties);
-  }
-  VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties);
-  }
-  void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
-  }
-  void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
-  }
-  void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
-  }
-  void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
-  }
-  VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
-  }
-  void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
-  }
-  void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
-  }
-  void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures  ) const
-  {
-    return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures);
-  }
-  void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures  ) const
-  {
-    return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures);
-  }
-  void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures  ) const
-  {
-    return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures);
-  }
-  void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties);
-  }
-  void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties);
-  }
-  void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties);
-  }
-  void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits  ) const
-  {
-    return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits);
-  }
-  VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
-  }
-  VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties);
-  }
-  VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties);
-  }
-  void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties);
-  }
-  void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties);
-  }
-  void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties);
-  }
-  void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties);
-  }
-  VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects  ) const
-  {
-    return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects);
-  }
-  void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties);
-  }
-  void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties);
-  }
-  void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties);
-  }
-  void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
-  }
-  void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
-  }
-  void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
-  }
-  void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
-  }
-  void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties);
-  }
-  void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties  ) const
-  {
-    return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties);
-  }
-  VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities  ) const
-  {
-    return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities);
-  }
-  VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities  ) const
-  {
-    return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
-  }
-  VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities  ) const
-  {
-    return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities);
-  }
-  VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats  ) const
-  {
-    return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
-  }
-  VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats  ) const
-  {
-    return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
-  }
-  VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes  ) const
-  {
-    return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes);
-  }
-  VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported  ) const
-  {
-    return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported);
-  }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
+    }
+
+    VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
+    }
+
+    VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
+    }
+
+    VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
+    }
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+    VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb );
+    }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+    VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+    void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
+    }
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
+    }
+
+    void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
+    }
+
+    void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
+    }
+
+    void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
+    }
+
+    VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
+    }
+
+    void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
+    }
+
+    void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
+    }
+
+    void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
+    }
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
+    }
+
+    VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
+    }
+
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display  ) const
-  {
-    return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display);
-  }
+    VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
+    }
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex  ) const
-  {
-    return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex);
-  }
+    VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
 #ifdef VK_USE_PLATFORM_XCB_KHR
-  VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id  ) const
-  {
-    return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id);
-  }
+    VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
+    }
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
+
 #ifdef VK_USE_PLATFORM_XLIB_KHR
-  VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID  ) const
-  {
-    return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID);
-  }
+    VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
+    }
 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
-  VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData  ) const
-  {
-    return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData);
-  }
-  VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags  ) const
-  {
-    return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
-  }
-  void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData  ) const
-  {
-    return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData);
-  }
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-  VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay  ) const
-  {
-    return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay);
-  }
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
-  VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData  ) const
-  {
-    return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData);
-  }
-  VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties  ) const
-  {
-    return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties);
-  }
-  void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity  ) const
-  {
-    return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity);
-  }
-  VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd  ) const
-  {
-    return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd);
-  }
+
+    VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
+    }
+
+    VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
+    }
+
+    VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
+    }
+
+    VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
+    }
+
+    void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData );
+    }
+
+    VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
+    }
+
+    void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
+    }
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+    VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
+    }
+
+    void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
+    }
+
+    VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
+    }
+
+    VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
+    }
+
+    VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
+    }
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle  ) const
-  {
-    return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle);
-  }
+    VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo  ) const
-  {
-    return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
-  }
-  VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue  ) const
-  {
-    return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue);
-  }
-  VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages  ) const
-  {
-    return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages);
-  }
-  VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain  ) const
-  {
-    return ::vkGetSwapchainStatusKHR( device, swapchain);
-  }
-  VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData  ) const
-  {
-    return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData);
-  }
-  VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo  ) const
-  {
-    return ::vkImportFenceFdKHR( device, pImportFenceFdInfo);
-  }
+
+    VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
+    }
+
+    VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
+    }
+
+    VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
+    }
+
+    VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainStatusKHR( device, swapchain );
+    }
+
+    VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
+    }
+
+    VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
+    }
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo  ) const
-  {
-    return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo);
-  }
+    VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo  ) const
-  {
-    return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo);
-  }
+
+    VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
+    }
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo  ) const
-  {
-    return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo);
-  }
+    VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
+    }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges  ) const
-  {
-    return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges);
-  }
-  VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData  ) const
-  {
-    return ::vkMapMemory( device, memory, offset, size, flags, ppData);
-  }
-  VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches  ) const
-  {
-    return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches);
-  }
-  VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches  ) const
-  {
-    return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches);
-  }
-  void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo  ) const
-  {
-    return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo);
-  }
-  VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence  ) const
-  {
-    return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence);
-  }
-  void vkQueueEndDebugUtilsLabelEXT( VkQueue queue  ) const
-  {
-    return ::vkQueueEndDebugUtilsLabelEXT( queue);
-  }
-  void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo  ) const
-  {
-    return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo);
-  }
-  VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo  ) const
-  {
-    return ::vkQueuePresentKHR( queue, pPresentInfo);
-  }
-  VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence  ) const
-  {
-    return ::vkQueueSubmit( queue, submitCount, pSubmits, fence);
-  }
-  VkResult vkQueueWaitIdle( VkQueue queue  ) const
-  {
-    return ::vkQueueWaitIdle( queue);
-  }
-  VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence  ) const
-  {
-    return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence);
-  }
-  VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence  ) const
-  {
-    return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence);
-  }
-  VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices  ) const
-  {
-    return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
-  }
-  VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display  ) const
-  {
-    return ::vkReleaseDisplayEXT( physicalDevice, display);
-  }
-  VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags  ) const
-  {
-    return ::vkResetCommandBuffer( commandBuffer, flags);
-  }
-  VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags  ) const
-  {
-    return ::vkResetCommandPool( device, commandPool, flags);
-  }
-  VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags  ) const
-  {
-    return ::vkResetDescriptorPool( device, descriptorPool, flags);
-  }
-  VkResult vkResetEvent( VkDevice device, VkEvent event  ) const
-  {
-    return ::vkResetEvent( device, event);
-  }
-  VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences  ) const
-  {
-    return ::vkResetFences( device, fenceCount, pFences);
-  }
-  VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo  ) const
-  {
-    return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo);
-  }
-  VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo  ) const
-  {
-    return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo);
-  }
-  VkResult vkSetEvent( VkDevice device, VkEvent event  ) const
-  {
-    return ::vkSetEvent( device, event);
-  }
-  void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata  ) const
-  {
-    return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata);
-  }
-  void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData  ) const
-  {
-    return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData);
-  }
-  void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags  ) const
-  {
-    return ::vkTrimCommandPool( device, commandPool, flags);
-  }
-  void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags  ) const
-  {
-    return ::vkTrimCommandPoolKHR( device, commandPool, flags);
-  }
-  void vkUnmapMemory( VkDevice device, VkDeviceMemory memory  ) const
-  {
-    return ::vkUnmapMemory( device, memory);
-  }
-  VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices  ) const
-  {
-    return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
-  }
-  void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData  ) const
-  {
-    return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData);
-  }
-  void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData  ) const
-  {
-    return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData);
-  }
-  void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies  ) const
-  {
-    return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
-  }
-  VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout  ) const
-  {
-    return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout);
-  }
-};
+
+    VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
+    }
+
+    VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+    VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMapMemory( device, memory, offset, size, flags, ppData );
+    }
+
+    VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+    VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+    void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+    VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
+    }
+
+    void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueEndDebugUtilsLabelEXT( queue );
+    }
+
+    void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+    VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueuePresentKHR( queue, pPresentInfo );
+    }
+
+    VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
+    }
+
+    VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
+    }
+
+    VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueWaitIdle( queue );
+    }
+
+    VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
+    }
+
+    VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
+    }
+
+    VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseDisplayEXT( physicalDevice, display );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
+    }
+
+    void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseProfilingLockKHR( device );
+    }
+
+    VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandBuffer( commandBuffer, flags );
+    }
+
+    VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandPool( device, commandPool, flags );
+    }
+
+    VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetDescriptorPool( device, descriptorPool, flags );
+    }
+
+    VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetEvent( device, event );
+    }
+
+    VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetFences( device, fenceCount, pFences );
+    }
+
+    void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
+    }
+
+    void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
+    }
+
+    VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
+    }
+
+    VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
+    }
+
+    VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetEvent( device, event );
+    }
+
+    void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
+    }
+
+    void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
+    }
+
+    VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data );
+    }
+
+    VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphore( device, pSignalInfo );
+    }
+
+    VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphoreKHR( device, pSignalInfo );
+    }
+
+    void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
+    }
+
+    void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPool( device, commandPool, flags );
+    }
+
+    void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPoolKHR( device, commandPool, flags );
+    }
+
+    void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUninitializePerformanceApiINTEL( device );
+    }
+
+    void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUnmapMemory( device, memory );
+    }
+
+    void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+    void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+    void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
+    }
+
+    VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
+    }
+
+    VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphores( device, pWaitInfo, timeout );
+    }
+
+    VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
+    }
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  };
+#endif
+
+  class DispatchLoaderDynamic;
+#if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC)
+# if defined(VK_NO_PROTOTYPES)
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+# else
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
+# endif
+#endif
+
+#if defined(_WIN32) && defined(VULKAN_HPP_STORAGE_SHARED)
+#  ifdef VULKAN_HPP_STORAGE_SHARED_EXPORT
+#    define VULKAN_HPP_STORAGE_API __declspec( dllexport )
+#  else
+#    define VULKAN_HPP_STORAGE_API __declspec( dllimport )
+#  endif
+#else
+#  define VULKAN_HPP_STORAGE_API
+#endif
+
+#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER)
+# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#  define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
+#  define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; }
+  extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+# else
+#  define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic()
+#  define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+# endif
+#endif
+
+#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER_TYPE)
+# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+  #define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
+# else
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
+# endif
+#endif
+
+#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
+#else
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {}
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER
+#endif
 
   struct AllocationCallbacks;
 
@@ -2514,26 +3597,32 @@
   class ObjectDestroy
   {
     public:
-      ObjectDestroy( OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() )
-        : m_owner( owner )
-        , m_allocationCallbacks( allocationCallbacks )
-        , m_dispatch( &dispatch )
-      {}
+    ObjectDestroy() = default;
 
-      OwnerType getOwner() const { return m_owner; }
-      Optional<const AllocationCallbacks> getAllocator() const { return m_allocationCallbacks; }
+    ObjectDestroy( OwnerType owner,
+                   Optional<const AllocationCallbacks> allocationCallbacks
+                                    VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                   Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
 
     protected:
       template <typename T>
-      void destroy(T t)
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
       {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
         m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
       }
 
     private:
-      OwnerType m_owner;
-      Optional<const AllocationCallbacks> m_allocationCallbacks;
-      Dispatch const* m_dispatch;
+    OwnerType                           m_owner               = {};
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
   };
 
   class NoParent;
@@ -2542,5504 +3631,289 @@
   class ObjectDestroy<NoParent,Dispatch>
   {
     public:
-      ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() )
-        : m_allocationCallbacks( allocationCallbacks )
-        , m_dispatch( &dispatch )
-      {}
+    ObjectDestroy() = default;
 
-      Optional<const AllocationCallbacks> getAllocator() const { return m_allocationCallbacks; }
+    ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
+                   Dispatch const &                    dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+      : m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
 
     protected:
       template <typename T>
-      void destroy(T t)
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
       {
+        VULKAN_HPP_ASSERT( m_dispatch );
         t.destroy( m_allocationCallbacks, *m_dispatch );
       }
 
     private:
-      Optional<const AllocationCallbacks> m_allocationCallbacks;
-      Dispatch const* m_dispatch;
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
   };
 
   template <typename OwnerType, typename Dispatch>
   class ObjectFree
   {
-    public:
-      ObjectFree( OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() )
-        : m_owner( owner )
-        , m_allocationCallbacks( allocationCallbacks )
-        , m_dispatch( &dispatch )
-      {}
+  public:
+    ObjectFree() = default;
 
-      OwnerType getOwner() const { return m_owner; }
-      Optional<const AllocationCallbacks> getAllocator() const { return m_allocationCallbacks; }
+    ObjectFree( OwnerType                                               owner,
+                Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
 
-    protected:
-      template <typename T>
-      void destroy(T t)
-      {
-        m_owner.free( t, m_allocationCallbacks, *m_dispatch );
-      }
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_owner;
+    }
 
-    private:
-      OwnerType m_owner;
-      Optional<const AllocationCallbacks> m_allocationCallbacks;
-      Dispatch const* m_dispatch;
+    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_allocationCallbacks;
+    }
+
+  protected:
+    template <typename T>
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      m_owner.free( t, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    OwnerType                           m_owner               = {};
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
+  };
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectRelease
+  {
+  public:
+    ObjectRelease() = default;
+
+    ObjectRelease( OwnerType owner, Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_owner;
+    }
+
+  protected:
+    template <typename T>
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      m_owner.release( t, *m_dispatch );
+    }
+
+  private:
+    OwnerType        m_owner    = {};
+    Dispatch const * m_dispatch = nullptr;
   };
 
   template <typename OwnerType, typename PoolType, typename Dispatch>
   class PoolFree
   {
     public:
-      PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = Dispatch() )
+      PoolFree() = default;
+
+      PoolFree( OwnerType        owner,
+                PoolType         pool,
+                Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
         : m_owner( owner )
         , m_pool( pool )
         , m_dispatch( &dispatch )
       {}
 
-      OwnerType getOwner() const { return m_owner; }
-      PoolType getPool() const { return m_pool; }
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; }
 
     protected:
       template <typename T>
-      void destroy(T t)
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
       {
         m_owner.free( m_pool, t, *m_dispatch );
       }
 
     private:
-      OwnerType m_owner;
-      PoolType m_pool;
-      Dispatch const* m_dispatch;
+      OwnerType        m_owner    = OwnerType();
+      PoolType         m_pool     = PoolType();
+      Dispatch const * m_dispatch = nullptr;
   };
-
-  using SampleMask = uint32_t;
 
   using Bool32 = uint32_t;
-
+  using DeviceAddress = uint64_t;
   using DeviceSize = uint64_t;
+  using SampleMask = uint32_t;
 
-  enum class FramebufferCreateFlagBits
-  {
-  };
-
-  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
+  template <typename EnumType, EnumType value>
+  struct CppType
+  {};
 
-  enum class QueryPoolCreateFlagBits
+  template <typename Type>
+  struct isVulkanHandleType
   {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
   };
 
-  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
-
-  enum class RenderPassCreateFlagBits
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  enum class AccelerationStructureBuildTypeKHR
   {
+    eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
+    eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
+    eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR
   };
 
-  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
-
-  enum class SamplerCreateFlagBits
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value )
   {
-  };
-
-  using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
-
-  enum class PipelineLayoutCreateFlagBits
-  {
-  };
-
-  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
-
-  enum class PipelineCacheCreateFlagBits
-  {
-  };
-
-  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
-
-  enum class PipelineDepthStencilStateCreateFlagBits
-  {
-  };
-
-  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
-
-  enum class PipelineDynamicStateCreateFlagBits
-  {
-  };
-
-  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
-
-  enum class PipelineColorBlendStateCreateFlagBits
-  {
-  };
-
-  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
-
-  enum class PipelineMultisampleStateCreateFlagBits
-  {
-  };
-
-  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
-
-  enum class PipelineRasterizationStateCreateFlagBits
-  {
-  };
-
-  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
-
-  enum class PipelineViewportStateCreateFlagBits
-  {
-  };
-
-  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
-
-  enum class PipelineTessellationStateCreateFlagBits
-  {
-  };
-
-  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
-
-  enum class PipelineInputAssemblyStateCreateFlagBits
-  {
-  };
-
-  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
-
-  enum class PipelineVertexInputStateCreateFlagBits
-  {
-  };
-
-  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
-
-  enum class PipelineShaderStageCreateFlagBits
-  {
-  };
-
-  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
-
-  enum class BufferViewCreateFlagBits
-  {
-  };
-
-  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
-
-  enum class InstanceCreateFlagBits
-  {
-  };
-
-  using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
-
-  enum class DeviceCreateFlagBits
-  {
-  };
-
-  using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
-
-  enum class ImageViewCreateFlagBits
-  {
-  };
-
-  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
-
-  enum class SemaphoreCreateFlagBits
-  {
-  };
-
-  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
-
-  enum class ShaderModuleCreateFlagBits
-  {
-  };
-
-  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
-
-  enum class EventCreateFlagBits
-  {
-  };
-
-  using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
-
-  enum class MemoryMapFlagBits
-  {
-  };
-
-  using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
-
-  enum class DescriptorPoolResetFlagBits
-  {
-  };
-
-  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
-
-  enum class DescriptorUpdateTemplateCreateFlagBits
-  {
-  };
-
-  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits, VkDescriptorUpdateTemplateCreateFlags>;
-
-  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
-
-  enum class DisplayModeCreateFlagBitsKHR
-  {
-  };
-
-  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
-
-  enum class DisplaySurfaceCreateFlagBitsKHR
-  {
-  };
-
-  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  enum class AndroidSurfaceCreateFlagBitsKHR
-  {
-  };
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-#ifdef VK_USE_PLATFORM_VI_NN
-  enum class ViSurfaceCreateFlagBitsNN
-  {
-  };
-#endif /*VK_USE_PLATFORM_VI_NN*/
-
-#ifdef VK_USE_PLATFORM_VI_NN
-  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;
-#endif /*VK_USE_PLATFORM_VI_NN*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  enum class WaylandSurfaceCreateFlagBitsKHR
-  {
-  };
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  enum class Win32SurfaceCreateFlagBitsKHR
-  {
-  };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  enum class XlibSurfaceCreateFlagBitsKHR
-  {
-  };
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-  enum class XcbSurfaceCreateFlagBitsKHR
-  {
-  };
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-  enum class IOSSurfaceCreateFlagBitsMVK
-  {
-  };
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK, VkIOSSurfaceCreateFlagsMVK>;
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-  enum class MacOSSurfaceCreateFlagBitsMVK
-  {
-  };
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK, VkMacOSSurfaceCreateFlagsMVK>;
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA
-  {
-  };
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA, VkImagePipeSurfaceCreateFlagsFUCHSIA>;
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-
-  enum class CommandPoolTrimFlagBits
-  {
-  };
-
-  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits, VkCommandPoolTrimFlags>;
-
-  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
-
-  enum class PipelineViewportSwizzleStateCreateFlagBitsNV
-  {
-  };
-
-  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV, VkPipelineViewportSwizzleStateCreateFlagsNV>;
-
-  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT
-  {
-  };
-
-  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT, VkPipelineDiscardRectangleStateCreateFlagsEXT>;
-
-  enum class PipelineCoverageToColorStateCreateFlagBitsNV
-  {
-  };
-
-  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV, VkPipelineCoverageToColorStateCreateFlagsNV>;
-
-  enum class PipelineCoverageModulationStateCreateFlagBitsNV
-  {
-  };
-
-  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV, VkPipelineCoverageModulationStateCreateFlagsNV>;
-
-  enum class ValidationCacheCreateFlagBitsEXT
-  {
-  };
-
-  using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT, VkValidationCacheCreateFlagsEXT>;
-
-  enum class DebugUtilsMessengerCreateFlagBitsEXT
-  {
-  };
-
-  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT, VkDebugUtilsMessengerCreateFlagsEXT>;
-
-  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT
-  {
-  };
-
-  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT, VkDebugUtilsMessengerCallbackDataFlagsEXT>;
-
-  enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT
-  {
-  };
-
-  using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT, VkPipelineRasterizationConservativeStateCreateFlagsEXT>;
-
-  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT
-  {
-  };
-
-  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT, VkPipelineRasterizationStateStreamCreateFlagsEXT>;
-
-  class DeviceMemory
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DeviceMemory()
-      : m_deviceMemory(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t )
-      : m_deviceMemory(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory )
-      : m_deviceMemory( deviceMemory )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DeviceMemory & operator=(VkDeviceMemory deviceMemory)
-    {
-      m_deviceMemory = deviceMemory;
-      return *this; 
-    }
-#endif
-
-    DeviceMemory & operator=( std::nullptr_t )
-    {
-      m_deviceMemory = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DeviceMemory const & rhs ) const
-    {
-      return m_deviceMemory == rhs.m_deviceMemory;
-    }
-
-    bool operator!=(DeviceMemory const & rhs ) const
-    {
-      return m_deviceMemory != rhs.m_deviceMemory;
-    }
-
-    bool operator<(DeviceMemory const & rhs ) const
-    {
-      return m_deviceMemory < rhs.m_deviceMemory;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const
-    {
-      return m_deviceMemory;
-    }
-
-    explicit operator bool() const
-    {
-      return m_deviceMemory != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
+    switch ( value )
     {
-      return m_deviceMemory == VK_NULL_HANDLE;
+      case AccelerationStructureBuildTypeKHR::eHost : return "Host";
+      case AccelerationStructureBuildTypeKHR::eDevice : return "Device";
+      case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice";
+      default: return "invalid";
     }
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
-  private:
-    VkDeviceMemory m_deviceMemory;
-  };
-
-  static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
-
-  class CommandPool
+  enum class AccelerationStructureMemoryRequirementsTypeKHR
   {
-  public:
-    VULKAN_HPP_CONSTEXPR CommandPool()
-      : m_commandPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t )
-      : m_commandPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool )
-      : m_commandPool( commandPool )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    CommandPool & operator=(VkCommandPool commandPool)
-    {
-      m_commandPool = commandPool;
-      return *this; 
-    }
-#endif
-
-    CommandPool & operator=( std::nullptr_t )
-    {
-      m_commandPool = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( CommandPool const & rhs ) const
-    {
-      return m_commandPool == rhs.m_commandPool;
-    }
-
-    bool operator!=(CommandPool const & rhs ) const
-    {
-      return m_commandPool != rhs.m_commandPool;
-    }
-
-    bool operator<(CommandPool const & rhs ) const
-    {
-      return m_commandPool < rhs.m_commandPool;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const
-    {
-      return m_commandPool;
-    }
-
-    explicit operator bool() const
-    {
-      return m_commandPool != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_commandPool == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkCommandPool m_commandPool;
+    eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR,
+    eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR,
+    eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR
   };
+  using AccelerationStructureMemoryRequirementsTypeNV = AccelerationStructureMemoryRequirementsTypeKHR;
 
-  static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
-
-  class Buffer
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeKHR value )
   {
-  public:
-    VULKAN_HPP_CONSTEXPR Buffer()
-      : m_buffer(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t )
-      : m_buffer(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer )
-      : m_buffer( buffer )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Buffer & operator=(VkBuffer buffer)
+    switch ( value )
     {
-      m_buffer = buffer;
-      return *this; 
+      case AccelerationStructureMemoryRequirementsTypeKHR::eObject : return "Object";
+      case AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch : return "BuildScratch";
+      case AccelerationStructureMemoryRequirementsTypeKHR::eUpdateScratch : return "UpdateScratch";
+      default: return "invalid";
     }
-#endif
+  }
 
-    Buffer & operator=( std::nullptr_t )
-    {
-      m_buffer = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Buffer const & rhs ) const
-    {
-      return m_buffer == rhs.m_buffer;
-    }
-
-    bool operator!=(Buffer const & rhs ) const
-    {
-      return m_buffer != rhs.m_buffer;
-    }
-
-    bool operator<(Buffer const & rhs ) const
-    {
-      return m_buffer < rhs.m_buffer;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const
-    {
-      return m_buffer;
-    }
-
-    explicit operator bool() const
-    {
-      return m_buffer != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_buffer == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkBuffer m_buffer;
-  };
-
-  static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
-
-  class BufferView
+  enum class AccelerationStructureTypeKHR
   {
-  public:
-    VULKAN_HPP_CONSTEXPR BufferView()
-      : m_bufferView(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t )
-      : m_bufferView(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView )
-      : m_bufferView( bufferView )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    BufferView & operator=(VkBufferView bufferView)
-    {
-      m_bufferView = bufferView;
-      return *this; 
-    }
-#endif
-
-    BufferView & operator=( std::nullptr_t )
-    {
-      m_bufferView = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( BufferView const & rhs ) const
-    {
-      return m_bufferView == rhs.m_bufferView;
-    }
-
-    bool operator!=(BufferView const & rhs ) const
-    {
-      return m_bufferView != rhs.m_bufferView;
-    }
-
-    bool operator<(BufferView const & rhs ) const
-    {
-      return m_bufferView < rhs.m_bufferView;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const
-    {
-      return m_bufferView;
-    }
-
-    explicit operator bool() const
-    {
-      return m_bufferView != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_bufferView == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkBufferView m_bufferView;
+    eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
+    eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR
   };
+  using AccelerationStructureTypeNV = AccelerationStructureTypeKHR;
 
-  static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
-
-  class Image
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value )
   {
-  public:
-    VULKAN_HPP_CONSTEXPR Image()
-      : m_image(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Image( std::nullptr_t )
-      : m_image(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image )
-      : m_image( image )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Image & operator=(VkImage image)
+    switch ( value )
     {
-      m_image = image;
-      return *this; 
+      case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel";
+      case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel";
+      default: return "invalid";
     }
-#endif
+  }
 
-    Image & operator=( std::nullptr_t )
-    {
-      m_image = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Image const & rhs ) const
-    {
-      return m_image == rhs.m_image;
-    }
-
-    bool operator!=(Image const & rhs ) const
-    {
-      return m_image != rhs.m_image;
-    }
-
-    bool operator<(Image const & rhs ) const
-    {
-      return m_image < rhs.m_image;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const
-    {
-      return m_image;
-    }
-
-    explicit operator bool() const
-    {
-      return m_image != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_image == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkImage m_image;
-  };
-
-  static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
-
-  class ImageView
+  enum class AccessFlagBits : VkAccessFlags
   {
-  public:
-    VULKAN_HPP_CONSTEXPR ImageView()
-      : m_imageView(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t )
-      : m_imageView(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView )
-      : m_imageView( imageView )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ImageView & operator=(VkImageView imageView)
-    {
-      m_imageView = imageView;
-      return *this; 
-    }
-#endif
-
-    ImageView & operator=( std::nullptr_t )
-    {
-      m_imageView = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( ImageView const & rhs ) const
-    {
-      return m_imageView == rhs.m_imageView;
-    }
-
-    bool operator!=(ImageView const & rhs ) const
-    {
-      return m_imageView != rhs.m_imageView;
-    }
-
-    bool operator<(ImageView const & rhs ) const
-    {
-      return m_imageView < rhs.m_imageView;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const
-    {
-      return m_imageView;
-    }
-
-    explicit operator bool() const
-    {
-      return m_imageView != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_imageView == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkImageView m_imageView;
+    eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+    eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
+    eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
+    eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR
   };
 
-  static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
-
-  class ShaderModule
+  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
   {
-  public:
-    VULKAN_HPP_CONSTEXPR ShaderModule()
-      : m_shaderModule(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t )
-      : m_shaderModule(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule )
-      : m_shaderModule( shaderModule )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ShaderModule & operator=(VkShaderModule shaderModule)
-    {
-      m_shaderModule = shaderModule;
-      return *this; 
-    }
-#endif
-
-    ShaderModule & operator=( std::nullptr_t )
-    {
-      m_shaderModule = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( ShaderModule const & rhs ) const
-    {
-      return m_shaderModule == rhs.m_shaderModule;
-    }
-
-    bool operator!=(ShaderModule const & rhs ) const
-    {
-      return m_shaderModule != rhs.m_shaderModule;
-    }
-
-    bool operator<(ShaderModule const & rhs ) const
-    {
-      return m_shaderModule < rhs.m_shaderModule;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const
-    {
-      return m_shaderModule;
-    }
-
-    explicit operator bool() const
-    {
-      return m_shaderModule != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
+    switch ( value )
     {
-      return m_shaderModule == VK_NULL_HANDLE;
+      case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead";
+      case AccessFlagBits::eIndexRead : return "IndexRead";
+      case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead";
+      case AccessFlagBits::eUniformRead : return "UniformRead";
+      case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead";
+      case AccessFlagBits::eShaderRead : return "ShaderRead";
+      case AccessFlagBits::eShaderWrite : return "ShaderWrite";
+      case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead";
+      case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite";
+      case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead";
+      case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite";
+      case AccessFlagBits::eTransferRead : return "TransferRead";
+      case AccessFlagBits::eTransferWrite : return "TransferWrite";
+      case AccessFlagBits::eHostRead : return "HostRead";
+      case AccessFlagBits::eHostWrite : return "HostWrite";
+      case AccessFlagBits::eMemoryRead : return "MemoryRead";
+      case AccessFlagBits::eMemoryWrite : return "MemoryWrite";
+      case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT";
+      case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
+      case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
+      case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
+      case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
+      case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR";
+      case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR";
+      case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV";
+      case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
+      case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV";
+      case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV";
+      default: return "invalid";
     }
+  }
 
-  private:
-    VkShaderModule m_shaderModule;
-  };
+  enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
+  {};
 
-  static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
-
-  class Pipeline
+  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
   {
-  public:
-    VULKAN_HPP_CONSTEXPR Pipeline()
-      : m_pipeline(VK_NULL_HANDLE)
-    {}
+    return "(void)";
+  }
 
-    VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t )
-      : m_pipeline(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline )
-      : m_pipeline( pipeline )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Pipeline & operator=(VkPipeline pipeline)
-    {
-      m_pipeline = pipeline;
-      return *this; 
-    }
-#endif
-
-    Pipeline & operator=( std::nullptr_t )
-    {
-      m_pipeline = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Pipeline const & rhs ) const
-    {
-      return m_pipeline == rhs.m_pipeline;
-    }
-
-    bool operator!=(Pipeline const & rhs ) const
-    {
-      return m_pipeline != rhs.m_pipeline;
-    }
-
-    bool operator<(Pipeline const & rhs ) const
-    {
-      return m_pipeline < rhs.m_pipeline;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const
-    {
-      return m_pipeline;
-    }
-
-    explicit operator bool() const
-    {
-      return m_pipeline != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_pipeline == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkPipeline m_pipeline;
-  };
-
-  static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
-
-  class PipelineLayout
+  enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags
   {
-  public:
-    VULKAN_HPP_CONSTEXPR PipelineLayout()
-      : m_pipelineLayout(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t )
-      : m_pipelineLayout(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout )
-      : m_pipelineLayout( pipelineLayout )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    PipelineLayout & operator=(VkPipelineLayout pipelineLayout)
-    {
-      m_pipelineLayout = pipelineLayout;
-      return *this; 
-    }
-#endif
-
-    PipelineLayout & operator=( std::nullptr_t )
-    {
-      m_pipelineLayout = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( PipelineLayout const & rhs ) const
-    {
-      return m_pipelineLayout == rhs.m_pipelineLayout;
-    }
-
-    bool operator!=(PipelineLayout const & rhs ) const
-    {
-      return m_pipelineLayout != rhs.m_pipelineLayout;
-    }
-
-    bool operator<(PipelineLayout const & rhs ) const
-    {
-      return m_pipelineLayout < rhs.m_pipelineLayout;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const
-    {
-      return m_pipelineLayout;
-    }
-
-    explicit operator bool() const
-    {
-      return m_pipelineLayout != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_pipelineLayout == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkPipelineLayout m_pipelineLayout;
+    eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
   };
 
-  static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
-
-  class Sampler
+  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
   {
-  public:
-    VULKAN_HPP_CONSTEXPR Sampler()
-      : m_sampler(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t )
-      : m_sampler(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler )
-      : m_sampler( sampler )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Sampler & operator=(VkSampler sampler)
+    switch ( value )
     {
-      m_sampler = sampler;
-      return *this; 
+      case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias";
+      default: return "invalid";
     }
-#endif
-
-    Sampler & operator=( std::nullptr_t )
-    {
-      m_sampler = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Sampler const & rhs ) const
-    {
-      return m_sampler == rhs.m_sampler;
-    }
-
-    bool operator!=(Sampler const & rhs ) const
-    {
-      return m_sampler != rhs.m_sampler;
-    }
-
-    bool operator<(Sampler const & rhs ) const
-    {
-      return m_sampler < rhs.m_sampler;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const
-    {
-      return m_sampler;
-    }
-
-    explicit operator bool() const
-    {
-      return m_sampler != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_sampler == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkSampler m_sampler;
-  };
-
-  static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
-
-  class DescriptorSet
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorSet()
-      : m_descriptorSet(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t )
-      : m_descriptorSet(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet )
-      : m_descriptorSet( descriptorSet )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorSet & operator=(VkDescriptorSet descriptorSet)
-    {
-      m_descriptorSet = descriptorSet;
-      return *this; 
-    }
-#endif
-
-    DescriptorSet & operator=( std::nullptr_t )
-    {
-      m_descriptorSet = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DescriptorSet const & rhs ) const
-    {
-      return m_descriptorSet == rhs.m_descriptorSet;
-    }
-
-    bool operator!=(DescriptorSet const & rhs ) const
-    {
-      return m_descriptorSet != rhs.m_descriptorSet;
-    }
-
-    bool operator<(DescriptorSet const & rhs ) const
-    {
-      return m_descriptorSet < rhs.m_descriptorSet;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const
-    {
-      return m_descriptorSet;
-    }
-
-    explicit operator bool() const
-    {
-      return m_descriptorSet != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_descriptorSet == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDescriptorSet m_descriptorSet;
-  };
-
-  static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
-
-  class DescriptorSetLayout
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayout()
-      : m_descriptorSetLayout(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t )
-      : m_descriptorSetLayout(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout )
-      : m_descriptorSetLayout( descriptorSetLayout )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout)
-    {
-      m_descriptorSetLayout = descriptorSetLayout;
-      return *this; 
-    }
-#endif
-
-    DescriptorSetLayout & operator=( std::nullptr_t )
-    {
-      m_descriptorSetLayout = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DescriptorSetLayout const & rhs ) const
-    {
-      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
-    }
-
-    bool operator!=(DescriptorSetLayout const & rhs ) const
-    {
-      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
-    }
-
-    bool operator<(DescriptorSetLayout const & rhs ) const
-    {
-      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const
-    {
-      return m_descriptorSetLayout;
-    }
-
-    explicit operator bool() const
-    {
-      return m_descriptorSetLayout != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_descriptorSetLayout == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDescriptorSetLayout m_descriptorSetLayout;
-  };
-
-  static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
-
-  class DescriptorPool
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorPool()
-      : m_descriptorPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t )
-      : m_descriptorPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool )
-      : m_descriptorPool( descriptorPool )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorPool & operator=(VkDescriptorPool descriptorPool)
-    {
-      m_descriptorPool = descriptorPool;
-      return *this; 
-    }
-#endif
-
-    DescriptorPool & operator=( std::nullptr_t )
-    {
-      m_descriptorPool = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DescriptorPool const & rhs ) const
-    {
-      return m_descriptorPool == rhs.m_descriptorPool;
-    }
-
-    bool operator!=(DescriptorPool const & rhs ) const
-    {
-      return m_descriptorPool != rhs.m_descriptorPool;
-    }
-
-    bool operator<(DescriptorPool const & rhs ) const
-    {
-      return m_descriptorPool < rhs.m_descriptorPool;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const
-    {
-      return m_descriptorPool;
-    }
-
-    explicit operator bool() const
-    {
-      return m_descriptorPool != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_descriptorPool == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDescriptorPool m_descriptorPool;
-  };
-
-  static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
-
-  class Fence
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR Fence()
-      : m_fence(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t )
-      : m_fence(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence )
-      : m_fence( fence )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Fence & operator=(VkFence fence)
-    {
-      m_fence = fence;
-      return *this; 
-    }
-#endif
-
-    Fence & operator=( std::nullptr_t )
-    {
-      m_fence = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Fence const & rhs ) const
-    {
-      return m_fence == rhs.m_fence;
-    }
-
-    bool operator!=(Fence const & rhs ) const
-    {
-      return m_fence != rhs.m_fence;
-    }
-
-    bool operator<(Fence const & rhs ) const
-    {
-      return m_fence < rhs.m_fence;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const
-    {
-      return m_fence;
-    }
-
-    explicit operator bool() const
-    {
-      return m_fence != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_fence == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkFence m_fence;
-  };
-
-  static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
-
-  class Semaphore
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR Semaphore()
-      : m_semaphore(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t )
-      : m_semaphore(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore )
-      : m_semaphore( semaphore )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Semaphore & operator=(VkSemaphore semaphore)
-    {
-      m_semaphore = semaphore;
-      return *this; 
-    }
-#endif
-
-    Semaphore & operator=( std::nullptr_t )
-    {
-      m_semaphore = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Semaphore const & rhs ) const
-    {
-      return m_semaphore == rhs.m_semaphore;
-    }
-
-    bool operator!=(Semaphore const & rhs ) const
-    {
-      return m_semaphore != rhs.m_semaphore;
-    }
-
-    bool operator<(Semaphore const & rhs ) const
-    {
-      return m_semaphore < rhs.m_semaphore;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const
-    {
-      return m_semaphore;
-    }
-
-    explicit operator bool() const
-    {
-      return m_semaphore != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_semaphore == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkSemaphore m_semaphore;
-  };
-
-  static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
-
-  class Event
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR Event()
-      : m_event(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Event( std::nullptr_t )
-      : m_event(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event )
-      : m_event( event )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Event & operator=(VkEvent event)
-    {
-      m_event = event;
-      return *this; 
-    }
-#endif
-
-    Event & operator=( std::nullptr_t )
-    {
-      m_event = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Event const & rhs ) const
-    {
-      return m_event == rhs.m_event;
-    }
-
-    bool operator!=(Event const & rhs ) const
-    {
-      return m_event != rhs.m_event;
-    }
-
-    bool operator<(Event const & rhs ) const
-    {
-      return m_event < rhs.m_event;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const
-    {
-      return m_event;
-    }
-
-    explicit operator bool() const
-    {
-      return m_event != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_event == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkEvent m_event;
-  };
-
-  static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
-
-  class QueryPool
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR QueryPool()
-      : m_queryPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t )
-      : m_queryPool(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool )
-      : m_queryPool( queryPool )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    QueryPool & operator=(VkQueryPool queryPool)
-    {
-      m_queryPool = queryPool;
-      return *this; 
-    }
-#endif
-
-    QueryPool & operator=( std::nullptr_t )
-    {
-      m_queryPool = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( QueryPool const & rhs ) const
-    {
-      return m_queryPool == rhs.m_queryPool;
-    }
-
-    bool operator!=(QueryPool const & rhs ) const
-    {
-      return m_queryPool != rhs.m_queryPool;
-    }
-
-    bool operator<(QueryPool const & rhs ) const
-    {
-      return m_queryPool < rhs.m_queryPool;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const
-    {
-      return m_queryPool;
-    }
-
-    explicit operator bool() const
-    {
-      return m_queryPool != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_queryPool == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkQueryPool m_queryPool;
-  };
-
-  static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
-
-  class Framebuffer
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR Framebuffer()
-      : m_framebuffer(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t )
-      : m_framebuffer(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer )
-      : m_framebuffer( framebuffer )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Framebuffer & operator=(VkFramebuffer framebuffer)
-    {
-      m_framebuffer = framebuffer;
-      return *this; 
-    }
-#endif
-
-    Framebuffer & operator=( std::nullptr_t )
-    {
-      m_framebuffer = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Framebuffer const & rhs ) const
-    {
-      return m_framebuffer == rhs.m_framebuffer;
-    }
-
-    bool operator!=(Framebuffer const & rhs ) const
-    {
-      return m_framebuffer != rhs.m_framebuffer;
-    }
-
-    bool operator<(Framebuffer const & rhs ) const
-    {
-      return m_framebuffer < rhs.m_framebuffer;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const
-    {
-      return m_framebuffer;
-    }
-
-    explicit operator bool() const
-    {
-      return m_framebuffer != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_framebuffer == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkFramebuffer m_framebuffer;
-  };
-
-  static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
-
-  class RenderPass
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR RenderPass()
-      : m_renderPass(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t )
-      : m_renderPass(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass )
-      : m_renderPass( renderPass )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    RenderPass & operator=(VkRenderPass renderPass)
-    {
-      m_renderPass = renderPass;
-      return *this; 
-    }
-#endif
-
-    RenderPass & operator=( std::nullptr_t )
-    {
-      m_renderPass = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( RenderPass const & rhs ) const
-    {
-      return m_renderPass == rhs.m_renderPass;
-    }
-
-    bool operator!=(RenderPass const & rhs ) const
-    {
-      return m_renderPass != rhs.m_renderPass;
-    }
-
-    bool operator<(RenderPass const & rhs ) const
-    {
-      return m_renderPass < rhs.m_renderPass;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const
-    {
-      return m_renderPass;
-    }
-
-    explicit operator bool() const
-    {
-      return m_renderPass != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_renderPass == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkRenderPass m_renderPass;
-  };
-
-  static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
-
-  class PipelineCache
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR PipelineCache()
-      : m_pipelineCache(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t )
-      : m_pipelineCache(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache )
-      : m_pipelineCache( pipelineCache )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    PipelineCache & operator=(VkPipelineCache pipelineCache)
-    {
-      m_pipelineCache = pipelineCache;
-      return *this; 
-    }
-#endif
-
-    PipelineCache & operator=( std::nullptr_t )
-    {
-      m_pipelineCache = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( PipelineCache const & rhs ) const
-    {
-      return m_pipelineCache == rhs.m_pipelineCache;
-    }
-
-    bool operator!=(PipelineCache const & rhs ) const
-    {
-      return m_pipelineCache != rhs.m_pipelineCache;
-    }
-
-    bool operator<(PipelineCache const & rhs ) const
-    {
-      return m_pipelineCache < rhs.m_pipelineCache;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const
-    {
-      return m_pipelineCache;
-    }
-
-    explicit operator bool() const
-    {
-      return m_pipelineCache != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_pipelineCache == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkPipelineCache m_pipelineCache;
-  };
-
-  static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
-
-  class ObjectTableNVX
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR ObjectTableNVX()
-      : m_objectTableNVX(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t )
-      : m_objectTableNVX(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX )
-      : m_objectTableNVX( objectTableNVX )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX)
-    {
-      m_objectTableNVX = objectTableNVX;
-      return *this; 
-    }
-#endif
-
-    ObjectTableNVX & operator=( std::nullptr_t )
-    {
-      m_objectTableNVX = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( ObjectTableNVX const & rhs ) const
-    {
-      return m_objectTableNVX == rhs.m_objectTableNVX;
-    }
-
-    bool operator!=(ObjectTableNVX const & rhs ) const
-    {
-      return m_objectTableNVX != rhs.m_objectTableNVX;
-    }
-
-    bool operator<(ObjectTableNVX const & rhs ) const
-    {
-      return m_objectTableNVX < rhs.m_objectTableNVX;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const
-    {
-      return m_objectTableNVX;
-    }
-
-    explicit operator bool() const
-    {
-      return m_objectTableNVX != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_objectTableNVX == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkObjectTableNVX m_objectTableNVX;
-  };
-
-  static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
-
-  class IndirectCommandsLayoutNVX
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX()
-      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t )
-      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX )
-      : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
-    {
-      m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
-      return *this; 
-    }
-#endif
-
-    IndirectCommandsLayoutNVX & operator=( std::nullptr_t )
-    {
-      m_indirectCommandsLayoutNVX = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( IndirectCommandsLayoutNVX const & rhs ) const
-    {
-      return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
-    }
-
-    bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const
-    {
-      return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
-    }
-
-    bool operator<(IndirectCommandsLayoutNVX const & rhs ) const
-    {
-      return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const
-    {
-      return m_indirectCommandsLayoutNVX;
-    }
-
-    explicit operator bool() const
-    {
-      return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
-  };
-
-  static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
-
-  class DescriptorUpdateTemplate
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate()
-      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t )
-      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate )
-      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate)
-    {
-      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
-      return *this; 
-    }
-#endif
-
-    DescriptorUpdateTemplate & operator=( std::nullptr_t )
-    {
-      m_descriptorUpdateTemplate = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DescriptorUpdateTemplate const & rhs ) const
-    {
-      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
-    }
-
-    bool operator!=(DescriptorUpdateTemplate const & rhs ) const
-    {
-      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
-    }
-
-    bool operator<(DescriptorUpdateTemplate const & rhs ) const
-    {
-      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const
-    {
-      return m_descriptorUpdateTemplate;
-    }
-
-    explicit operator bool() const
-    {
-      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
-  };
-
-  static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
-
-  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
-
-  class SamplerYcbcrConversion
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion()
-      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t )
-      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion )
-      : m_samplerYcbcrConversion( samplerYcbcrConversion )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion)
-    {
-      m_samplerYcbcrConversion = samplerYcbcrConversion;
-      return *this; 
-    }
-#endif
-
-    SamplerYcbcrConversion & operator=( std::nullptr_t )
-    {
-      m_samplerYcbcrConversion = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( SamplerYcbcrConversion const & rhs ) const
-    {
-      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
-    }
-
-    bool operator!=(SamplerYcbcrConversion const & rhs ) const
-    {
-      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
-    }
-
-    bool operator<(SamplerYcbcrConversion const & rhs ) const
-    {
-      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const
-    {
-      return m_samplerYcbcrConversion;
-    }
-
-    explicit operator bool() const
-    {
-      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkSamplerYcbcrConversion m_samplerYcbcrConversion;
-  };
-
-  static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
-
-  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
-
-  class ValidationCacheEXT
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR ValidationCacheEXT()
-      : m_validationCacheEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t )
-      : m_validationCacheEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT )
-      : m_validationCacheEXT( validationCacheEXT )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT)
-    {
-      m_validationCacheEXT = validationCacheEXT;
-      return *this; 
-    }
-#endif
-
-    ValidationCacheEXT & operator=( std::nullptr_t )
-    {
-      m_validationCacheEXT = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( ValidationCacheEXT const & rhs ) const
-    {
-      return m_validationCacheEXT == rhs.m_validationCacheEXT;
-    }
-
-    bool operator!=(ValidationCacheEXT const & rhs ) const
-    {
-      return m_validationCacheEXT != rhs.m_validationCacheEXT;
-    }
-
-    bool operator<(ValidationCacheEXT const & rhs ) const
-    {
-      return m_validationCacheEXT < rhs.m_validationCacheEXT;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const
-    {
-      return m_validationCacheEXT;
-    }
-
-    explicit operator bool() const
-    {
-      return m_validationCacheEXT != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_validationCacheEXT == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkValidationCacheEXT m_validationCacheEXT;
-  };
-
-  static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
-
-  class AccelerationStructureNV
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR AccelerationStructureNV()
-      : m_accelerationStructureNV(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t )
-      : m_accelerationStructureNV(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV )
-      : m_accelerationStructureNV( accelerationStructureNV )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV)
-    {
-      m_accelerationStructureNV = accelerationStructureNV;
-      return *this; 
-    }
-#endif
-
-    AccelerationStructureNV & operator=( std::nullptr_t )
-    {
-      m_accelerationStructureNV = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( AccelerationStructureNV const & rhs ) const
-    {
-      return m_accelerationStructureNV == rhs.m_accelerationStructureNV;
-    }
-
-    bool operator!=(AccelerationStructureNV const & rhs ) const
-    {
-      return m_accelerationStructureNV != rhs.m_accelerationStructureNV;
-    }
-
-    bool operator<(AccelerationStructureNV const & rhs ) const
-    {
-      return m_accelerationStructureNV < rhs.m_accelerationStructureNV;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const
-    {
-      return m_accelerationStructureNV;
-    }
-
-    explicit operator bool() const
-    {
-      return m_accelerationStructureNV != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_accelerationStructureNV == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkAccelerationStructureNV m_accelerationStructureNV;
-  };
-
-  static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" );
-
-  class DisplayKHR
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DisplayKHR()
-      : m_displayKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t )
-      : m_displayKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR )
-      : m_displayKHR( displayKHR )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DisplayKHR & operator=(VkDisplayKHR displayKHR)
-    {
-      m_displayKHR = displayKHR;
-      return *this; 
-    }
-#endif
-
-    DisplayKHR & operator=( std::nullptr_t )
-    {
-      m_displayKHR = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DisplayKHR const & rhs ) const
-    {
-      return m_displayKHR == rhs.m_displayKHR;
-    }
-
-    bool operator!=(DisplayKHR const & rhs ) const
-    {
-      return m_displayKHR != rhs.m_displayKHR;
-    }
-
-    bool operator<(DisplayKHR const & rhs ) const
-    {
-      return m_displayKHR < rhs.m_displayKHR;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const
-    {
-      return m_displayKHR;
-    }
-
-    explicit operator bool() const
-    {
-      return m_displayKHR != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_displayKHR == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDisplayKHR m_displayKHR;
-  };
-
-  static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
-
-  class DisplayModeKHR
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DisplayModeKHR()
-      : m_displayModeKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t )
-      : m_displayModeKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR )
-      : m_displayModeKHR( displayModeKHR )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR)
-    {
-      m_displayModeKHR = displayModeKHR;
-      return *this; 
-    }
-#endif
-
-    DisplayModeKHR & operator=( std::nullptr_t )
-    {
-      m_displayModeKHR = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DisplayModeKHR const & rhs ) const
-    {
-      return m_displayModeKHR == rhs.m_displayModeKHR;
-    }
-
-    bool operator!=(DisplayModeKHR const & rhs ) const
-    {
-      return m_displayModeKHR != rhs.m_displayModeKHR;
-    }
-
-    bool operator<(DisplayModeKHR const & rhs ) const
-    {
-      return m_displayModeKHR < rhs.m_displayModeKHR;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const
-    {
-      return m_displayModeKHR;
-    }
-
-    explicit operator bool() const
-    {
-      return m_displayModeKHR != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_displayModeKHR == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDisplayModeKHR m_displayModeKHR;
-  };
-
-  static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
-
-  class SurfaceKHR
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR SurfaceKHR()
-      : m_surfaceKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t )
-      : m_surfaceKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR )
-      : m_surfaceKHR( surfaceKHR )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR)
-    {
-      m_surfaceKHR = surfaceKHR;
-      return *this; 
-    }
-#endif
-
-    SurfaceKHR & operator=( std::nullptr_t )
-    {
-      m_surfaceKHR = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( SurfaceKHR const & rhs ) const
-    {
-      return m_surfaceKHR == rhs.m_surfaceKHR;
-    }
-
-    bool operator!=(SurfaceKHR const & rhs ) const
-    {
-      return m_surfaceKHR != rhs.m_surfaceKHR;
-    }
-
-    bool operator<(SurfaceKHR const & rhs ) const
-    {
-      return m_surfaceKHR < rhs.m_surfaceKHR;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const
-    {
-      return m_surfaceKHR;
-    }
-
-    explicit operator bool() const
-    {
-      return m_surfaceKHR != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_surfaceKHR == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkSurfaceKHR m_surfaceKHR;
-  };
-
-  static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
-
-  class SwapchainKHR
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR SwapchainKHR()
-      : m_swapchainKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t )
-      : m_swapchainKHR(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR )
-      : m_swapchainKHR( swapchainKHR )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR)
-    {
-      m_swapchainKHR = swapchainKHR;
-      return *this; 
-    }
-#endif
-
-    SwapchainKHR & operator=( std::nullptr_t )
-    {
-      m_swapchainKHR = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( SwapchainKHR const & rhs ) const
-    {
-      return m_swapchainKHR == rhs.m_swapchainKHR;
-    }
-
-    bool operator!=(SwapchainKHR const & rhs ) const
-    {
-      return m_swapchainKHR != rhs.m_swapchainKHR;
-    }
-
-    bool operator<(SwapchainKHR const & rhs ) const
-    {
-      return m_swapchainKHR < rhs.m_swapchainKHR;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const
-    {
-      return m_swapchainKHR;
-    }
-
-    explicit operator bool() const
-    {
-      return m_swapchainKHR != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_swapchainKHR == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkSwapchainKHR m_swapchainKHR;
-  };
-
-  static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
-
-  class DebugReportCallbackEXT
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT()
-      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t )
-      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT )
-      : m_debugReportCallbackEXT( debugReportCallbackEXT )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT)
-    {
-      m_debugReportCallbackEXT = debugReportCallbackEXT;
-      return *this; 
-    }
-#endif
-
-    DebugReportCallbackEXT & operator=( std::nullptr_t )
-    {
-      m_debugReportCallbackEXT = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DebugReportCallbackEXT const & rhs ) const
-    {
-      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
-    }
-
-    bool operator!=(DebugReportCallbackEXT const & rhs ) const
-    {
-      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
-    }
-
-    bool operator<(DebugReportCallbackEXT const & rhs ) const
-    {
-      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const
-    {
-      return m_debugReportCallbackEXT;
-    }
-
-    explicit operator bool() const
-    {
-      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDebugReportCallbackEXT m_debugReportCallbackEXT;
-  };
-
-  static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
-
-  class DebugUtilsMessengerEXT
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT()
-      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t )
-      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT )
-      : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT)
-    {
-      m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
-      return *this; 
-    }
-#endif
-
-    DebugUtilsMessengerEXT & operator=( std::nullptr_t )
-    {
-      m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( DebugUtilsMessengerEXT const & rhs ) const
-    {
-      return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
-    }
-
-    bool operator!=(DebugUtilsMessengerEXT const & rhs ) const
-    {
-      return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
-    }
-
-    bool operator<(DebugUtilsMessengerEXT const & rhs ) const
-    {
-      return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
-    }
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const
-    {
-      return m_debugUtilsMessengerEXT;
-    }
-
-    explicit operator bool() const
-    {
-      return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
-  };
-
-  static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
-
-  struct Offset2D
-  {
-    Offset2D( int32_t x_ = 0,
-              int32_t y_ = 0 )
-      : x( x_ )
-      , y( y_ )
-    {
-    }
-
-    Offset2D( VkOffset2D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Offset2D ) );
-    }
-
-    Offset2D& operator=( VkOffset2D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Offset2D ) );
-      return *this;
-    }
-    Offset2D& setX( int32_t x_ )
-    {
-      x = x_;
-      return *this;
-    }
-
-    Offset2D& setY( int32_t y_ )
-    {
-      y = y_;
-      return *this;
-    }
-
-    operator VkOffset2D const&() const
-    {
-      return *reinterpret_cast<const VkOffset2D*>(this);
-    }
-
-    operator VkOffset2D &()
-    {
-      return *reinterpret_cast<VkOffset2D*>(this);
-    }
-
-    bool operator==( Offset2D const& rhs ) const
-    {
-      return ( x == rhs.x )
-          && ( y == rhs.y );
-    }
-
-    bool operator!=( Offset2D const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    int32_t x;
-    int32_t y;
-  };
-  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
-
-  struct Offset3D
-  {
-    Offset3D( int32_t x_ = 0,
-              int32_t y_ = 0,
-              int32_t z_ = 0 )
-      : x( x_ )
-      , y( y_ )
-      , z( z_ )
-    {
-    }
-
-    explicit Offset3D( Offset2D const& offset2D,
-                       int32_t z_ = 0 )
-      : x( offset2D.x )
-      , y( offset2D.y )
-      , z( z_ )
-    {}
-
-    Offset3D( VkOffset3D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Offset3D ) );
-    }
-
-    Offset3D& operator=( VkOffset3D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Offset3D ) );
-      return *this;
-    }
-    Offset3D& setX( int32_t x_ )
-    {
-      x = x_;
-      return *this;
-    }
-
-    Offset3D& setY( int32_t y_ )
-    {
-      y = y_;
-      return *this;
-    }
-
-    Offset3D& setZ( int32_t z_ )
-    {
-      z = z_;
-      return *this;
-    }
-
-    operator VkOffset3D const&() const
-    {
-      return *reinterpret_cast<const VkOffset3D*>(this);
-    }
-
-    operator VkOffset3D &()
-    {
-      return *reinterpret_cast<VkOffset3D*>(this);
-    }
-
-    bool operator==( Offset3D const& rhs ) const
-    {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( z == rhs.z );
-    }
-
-    bool operator!=( Offset3D const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    int32_t x;
-    int32_t y;
-    int32_t z;
-  };
-  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
-
-  struct Extent2D
-  {
-    Extent2D( uint32_t width_ = 0,
-              uint32_t height_ = 0 )
-      : width( width_ )
-      , height( height_ )
-    {
-    }
-
-    Extent2D( VkExtent2D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Extent2D ) );
-    }
-
-    Extent2D& operator=( VkExtent2D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Extent2D ) );
-      return *this;
-    }
-    Extent2D& setWidth( uint32_t width_ )
-    {
-      width = width_;
-      return *this;
-    }
-
-    Extent2D& setHeight( uint32_t height_ )
-    {
-      height = height_;
-      return *this;
-    }
-
-    operator VkExtent2D const&() const
-    {
-      return *reinterpret_cast<const VkExtent2D*>(this);
-    }
-
-    operator VkExtent2D &()
-    {
-      return *reinterpret_cast<VkExtent2D*>(this);
-    }
-
-    bool operator==( Extent2D const& rhs ) const
-    {
-      return ( width == rhs.width )
-          && ( height == rhs.height );
-    }
-
-    bool operator!=( Extent2D const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t width;
-    uint32_t height;
-  };
-  static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
-
-  struct Extent3D
-  {
-    Extent3D( uint32_t width_ = 0,
-              uint32_t height_ = 0,
-              uint32_t depth_ = 0 )
-      : width( width_ )
-      , height( height_ )
-      , depth( depth_ )
-    {
-    }
-
-    explicit Extent3D( Extent2D const& extent2D,
-                       uint32_t depth_ = 0 )
-      : width( extent2D.width )
-      , height( extent2D.height )
-      , depth( depth_ )
-    {}
-
-    Extent3D( VkExtent3D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Extent3D ) );
-    }
-
-    Extent3D& operator=( VkExtent3D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Extent3D ) );
-      return *this;
-    }
-    Extent3D& setWidth( uint32_t width_ )
-    {
-      width = width_;
-      return *this;
-    }
-
-    Extent3D& setHeight( uint32_t height_ )
-    {
-      height = height_;
-      return *this;
-    }
-
-    Extent3D& setDepth( uint32_t depth_ )
-    {
-      depth = depth_;
-      return *this;
-    }
-
-    operator VkExtent3D const&() const
-    {
-      return *reinterpret_cast<const VkExtent3D*>(this);
-    }
-
-    operator VkExtent3D &()
-    {
-      return *reinterpret_cast<VkExtent3D*>(this);
-    }
-
-    bool operator==( Extent3D const& rhs ) const
-    {
-      return ( width == rhs.width )
-          && ( height == rhs.height )
-          && ( depth == rhs.depth );
-    }
-
-    bool operator!=( Extent3D const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t width;
-    uint32_t height;
-    uint32_t depth;
-  };
-  static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
-
-  struct Viewport
-  {
-    Viewport( float x_ = 0,
-              float y_ = 0,
-              float width_ = 0,
-              float height_ = 0,
-              float minDepth_ = 0,
-              float maxDepth_ = 0 )
-      : x( x_ )
-      , y( y_ )
-      , width( width_ )
-      , height( height_ )
-      , minDepth( minDepth_ )
-      , maxDepth( maxDepth_ )
-    {
-    }
-
-    Viewport( VkViewport const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Viewport ) );
-    }
-
-    Viewport& operator=( VkViewport const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Viewport ) );
-      return *this;
-    }
-    Viewport& setX( float x_ )
-    {
-      x = x_;
-      return *this;
-    }
-
-    Viewport& setY( float y_ )
-    {
-      y = y_;
-      return *this;
-    }
-
-    Viewport& setWidth( float width_ )
-    {
-      width = width_;
-      return *this;
-    }
-
-    Viewport& setHeight( float height_ )
-    {
-      height = height_;
-      return *this;
-    }
-
-    Viewport& setMinDepth( float minDepth_ )
-    {
-      minDepth = minDepth_;
-      return *this;
-    }
-
-    Viewport& setMaxDepth( float maxDepth_ )
-    {
-      maxDepth = maxDepth_;
-      return *this;
-    }
-
-    operator VkViewport const&() const
-    {
-      return *reinterpret_cast<const VkViewport*>(this);
-    }
-
-    operator VkViewport &()
-    {
-      return *reinterpret_cast<VkViewport*>(this);
-    }
-
-    bool operator==( Viewport const& rhs ) const
-    {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( width == rhs.width )
-          && ( height == rhs.height )
-          && ( minDepth == rhs.minDepth )
-          && ( maxDepth == rhs.maxDepth );
-    }
-
-    bool operator!=( Viewport const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    float x;
-    float y;
-    float width;
-    float height;
-    float minDepth;
-    float maxDepth;
-  };
-  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
-
-  struct Rect2D
-  {
-    Rect2D( Offset2D offset_ = Offset2D(),
-            Extent2D extent_ = Extent2D() )
-      : offset( offset_ )
-      , extent( extent_ )
-    {
-    }
-
-    Rect2D( VkRect2D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Rect2D ) );
-    }
-
-    Rect2D& operator=( VkRect2D const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Rect2D ) );
-      return *this;
-    }
-    Rect2D& setOffset( Offset2D offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    Rect2D& setExtent( Extent2D extent_ )
-    {
-      extent = extent_;
-      return *this;
-    }
-
-    operator VkRect2D const&() const
-    {
-      return *reinterpret_cast<const VkRect2D*>(this);
-    }
-
-    operator VkRect2D &()
-    {
-      return *reinterpret_cast<VkRect2D*>(this);
-    }
-
-    bool operator==( Rect2D const& rhs ) const
-    {
-      return ( offset == rhs.offset )
-          && ( extent == rhs.extent );
-    }
-
-    bool operator!=( Rect2D const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Offset2D offset;
-    Extent2D extent;
-  };
-  static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
-
-  struct ClearRect
-  {
-    ClearRect( Rect2D rect_ = Rect2D(),
-               uint32_t baseArrayLayer_ = 0,
-               uint32_t layerCount_ = 0 )
-      : rect( rect_ )
-      , baseArrayLayer( baseArrayLayer_ )
-      , layerCount( layerCount_ )
-    {
-    }
-
-    ClearRect( VkClearRect const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ClearRect ) );
-    }
-
-    ClearRect& operator=( VkClearRect const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ClearRect ) );
-      return *this;
-    }
-    ClearRect& setRect( Rect2D rect_ )
-    {
-      rect = rect_;
-      return *this;
-    }
-
-    ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ )
-    {
-      baseArrayLayer = baseArrayLayer_;
-      return *this;
-    }
-
-    ClearRect& setLayerCount( uint32_t layerCount_ )
-    {
-      layerCount = layerCount_;
-      return *this;
-    }
-
-    operator VkClearRect const&() const
-    {
-      return *reinterpret_cast<const VkClearRect*>(this);
-    }
-
-    operator VkClearRect &()
-    {
-      return *reinterpret_cast<VkClearRect*>(this);
-    }
-
-    bool operator==( ClearRect const& rhs ) const
-    {
-      return ( rect == rhs.rect )
-          && ( baseArrayLayer == rhs.baseArrayLayer )
-          && ( layerCount == rhs.layerCount );
-    }
-
-    bool operator!=( ClearRect const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Rect2D rect;
-    uint32_t baseArrayLayer;
-    uint32_t layerCount;
-  };
-  static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
-
-  struct ExtensionProperties
-  {
-    operator VkExtensionProperties const&() const
-    {
-      return *reinterpret_cast<const VkExtensionProperties*>(this);
-    }
-
-    operator VkExtensionProperties &()
-    {
-      return *reinterpret_cast<VkExtensionProperties*>(this);
-    }
-
-    bool operator==( ExtensionProperties const& rhs ) const
-    {
-      return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
-          && ( specVersion == rhs.specVersion );
-    }
-
-    bool operator!=( ExtensionProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
-    uint32_t specVersion;
-  };
-  static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
-
-  struct LayerProperties
-  {
-    operator VkLayerProperties const&() const
-    {
-      return *reinterpret_cast<const VkLayerProperties*>(this);
-    }
-
-    operator VkLayerProperties &()
-    {
-      return *reinterpret_cast<VkLayerProperties*>(this);
-    }
-
-    bool operator==( LayerProperties const& rhs ) const
-    {
-      return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
-          && ( specVersion == rhs.specVersion )
-          && ( implementationVersion == rhs.implementationVersion )
-          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
-    }
-
-    bool operator!=( LayerProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    char layerName[VK_MAX_EXTENSION_NAME_SIZE];
-    uint32_t specVersion;
-    uint32_t implementationVersion;
-    char description[VK_MAX_DESCRIPTION_SIZE];
-  };
-  static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
-
-  struct AllocationCallbacks
-  {
-    AllocationCallbacks( void* pUserData_ = nullptr,
-                         PFN_vkAllocationFunction pfnAllocation_ = nullptr,
-                         PFN_vkReallocationFunction pfnReallocation_ = nullptr,
-                         PFN_vkFreeFunction pfnFree_ = nullptr,
-                         PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr,
-                         PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr )
-      : pUserData( pUserData_ )
-      , pfnAllocation( pfnAllocation_ )
-      , pfnReallocation( pfnReallocation_ )
-      , pfnFree( pfnFree_ )
-      , pfnInternalAllocation( pfnInternalAllocation_ )
-      , pfnInternalFree( pfnInternalFree_ )
-    {
-    }
-
-    AllocationCallbacks( VkAllocationCallbacks const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AllocationCallbacks ) );
-    }
-
-    AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AllocationCallbacks ) );
-      return *this;
-    }
-    AllocationCallbacks& setPUserData( void* pUserData_ )
-    {
-      pUserData = pUserData_;
-      return *this;
-    }
-
-    AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ )
-    {
-      pfnAllocation = pfnAllocation_;
-      return *this;
-    }
-
-    AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ )
-    {
-      pfnReallocation = pfnReallocation_;
-      return *this;
-    }
-
-    AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ )
-    {
-      pfnFree = pfnFree_;
-      return *this;
-    }
-
-    AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ )
-    {
-      pfnInternalAllocation = pfnInternalAllocation_;
-      return *this;
-    }
-
-    AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ )
-    {
-      pfnInternalFree = pfnInternalFree_;
-      return *this;
-    }
-
-    operator VkAllocationCallbacks const&() const
-    {
-      return *reinterpret_cast<const VkAllocationCallbacks*>(this);
-    }
-
-    operator VkAllocationCallbacks &()
-    {
-      return *reinterpret_cast<VkAllocationCallbacks*>(this);
-    }
-
-    bool operator==( AllocationCallbacks const& rhs ) const
-    {
-      return ( pUserData == rhs.pUserData )
-          && ( pfnAllocation == rhs.pfnAllocation )
-          && ( pfnReallocation == rhs.pfnReallocation )
-          && ( pfnFree == rhs.pfnFree )
-          && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
-          && ( pfnInternalFree == rhs.pfnInternalFree );
-    }
-
-    bool operator!=( AllocationCallbacks const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    void* pUserData;
-    PFN_vkAllocationFunction pfnAllocation;
-    PFN_vkReallocationFunction pfnReallocation;
-    PFN_vkFreeFunction pfnFree;
-    PFN_vkInternalAllocationNotification pfnInternalAllocation;
-    PFN_vkInternalFreeNotification pfnInternalFree;
-  };
-  static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
-
-  struct MemoryRequirements
-  {
-    operator VkMemoryRequirements const&() const
-    {
-      return *reinterpret_cast<const VkMemoryRequirements*>(this);
-    }
-
-    operator VkMemoryRequirements &()
-    {
-      return *reinterpret_cast<VkMemoryRequirements*>(this);
-    }
-
-    bool operator==( MemoryRequirements const& rhs ) const
-    {
-      return ( size == rhs.size )
-          && ( alignment == rhs.alignment )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( MemoryRequirements const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DeviceSize size;
-    DeviceSize alignment;
-    uint32_t memoryTypeBits;
-  };
-  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
-
-  struct DescriptorBufferInfo
-  {
-    DescriptorBufferInfo( Buffer buffer_ = Buffer(),
-                          DeviceSize offset_ = 0,
-                          DeviceSize range_ = 0 )
-      : buffer( buffer_ )
-      , offset( offset_ )
-      , range( range_ )
-    {
-    }
-
-    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) );
-    }
-
-    DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) );
-      return *this;
-    }
-    DescriptorBufferInfo& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    DescriptorBufferInfo& setOffset( DeviceSize offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    DescriptorBufferInfo& setRange( DeviceSize range_ )
-    {
-      range = range_;
-      return *this;
-    }
-
-    operator VkDescriptorBufferInfo const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorBufferInfo*>(this);
-    }
-
-    operator VkDescriptorBufferInfo &()
-    {
-      return *reinterpret_cast<VkDescriptorBufferInfo*>(this);
-    }
-
-    bool operator==( DescriptorBufferInfo const& rhs ) const
-    {
-      return ( buffer == rhs.buffer )
-          && ( offset == rhs.offset )
-          && ( range == rhs.range );
-    }
-
-    bool operator!=( DescriptorBufferInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Buffer buffer;
-    DeviceSize offset;
-    DeviceSize range;
-  };
-  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
-
-  struct SubresourceLayout
-  {
-    operator VkSubresourceLayout const&() const
-    {
-      return *reinterpret_cast<const VkSubresourceLayout*>(this);
-    }
-
-    operator VkSubresourceLayout &()
-    {
-      return *reinterpret_cast<VkSubresourceLayout*>(this);
-    }
-
-    bool operator==( SubresourceLayout const& rhs ) const
-    {
-      return ( offset == rhs.offset )
-          && ( size == rhs.size )
-          && ( rowPitch == rhs.rowPitch )
-          && ( arrayPitch == rhs.arrayPitch )
-          && ( depthPitch == rhs.depthPitch );
-    }
-
-    bool operator!=( SubresourceLayout const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DeviceSize offset;
-    DeviceSize size;
-    DeviceSize rowPitch;
-    DeviceSize arrayPitch;
-    DeviceSize depthPitch;
-  };
-  static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
-
-  struct BufferCopy
-  {
-    BufferCopy( DeviceSize srcOffset_ = 0,
-                DeviceSize dstOffset_ = 0,
-                DeviceSize size_ = 0 )
-      : srcOffset( srcOffset_ )
-      , dstOffset( dstOffset_ )
-      , size( size_ )
-    {
-    }
-
-    BufferCopy( VkBufferCopy const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferCopy ) );
-    }
-
-    BufferCopy& operator=( VkBufferCopy const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferCopy ) );
-      return *this;
-    }
-    BufferCopy& setSrcOffset( DeviceSize srcOffset_ )
-    {
-      srcOffset = srcOffset_;
-      return *this;
-    }
-
-    BufferCopy& setDstOffset( DeviceSize dstOffset_ )
-    {
-      dstOffset = dstOffset_;
-      return *this;
-    }
-
-    BufferCopy& setSize( DeviceSize size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    operator VkBufferCopy const&() const
-    {
-      return *reinterpret_cast<const VkBufferCopy*>(this);
-    }
-
-    operator VkBufferCopy &()
-    {
-      return *reinterpret_cast<VkBufferCopy*>(this);
-    }
-
-    bool operator==( BufferCopy const& rhs ) const
-    {
-      return ( srcOffset == rhs.srcOffset )
-          && ( dstOffset == rhs.dstOffset )
-          && ( size == rhs.size );
-    }
-
-    bool operator!=( BufferCopy const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DeviceSize srcOffset;
-    DeviceSize dstOffset;
-    DeviceSize size;
-  };
-  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
-
-  struct SpecializationMapEntry
-  {
-    SpecializationMapEntry( uint32_t constantID_ = 0,
-                            uint32_t offset_ = 0,
-                            size_t size_ = 0 )
-      : constantID( constantID_ )
-      , offset( offset_ )
-      , size( size_ )
-    {
-    }
-
-    SpecializationMapEntry( VkSpecializationMapEntry const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SpecializationMapEntry ) );
-    }
-
-    SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SpecializationMapEntry ) );
-      return *this;
-    }
-    SpecializationMapEntry& setConstantID( uint32_t constantID_ )
-    {
-      constantID = constantID_;
-      return *this;
-    }
-
-    SpecializationMapEntry& setOffset( uint32_t offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    SpecializationMapEntry& setSize( size_t size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    operator VkSpecializationMapEntry const&() const
-    {
-      return *reinterpret_cast<const VkSpecializationMapEntry*>(this);
-    }
-
-    operator VkSpecializationMapEntry &()
-    {
-      return *reinterpret_cast<VkSpecializationMapEntry*>(this);
-    }
-
-    bool operator==( SpecializationMapEntry const& rhs ) const
-    {
-      return ( constantID == rhs.constantID )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
-    }
-
-    bool operator!=( SpecializationMapEntry const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t constantID;
-    uint32_t offset;
-    size_t size;
-  };
-  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
-
-  struct SpecializationInfo
-  {
-    SpecializationInfo( uint32_t mapEntryCount_ = 0,
-                        const SpecializationMapEntry* pMapEntries_ = nullptr,
-                        size_t dataSize_ = 0,
-                        const void* pData_ = nullptr )
-      : mapEntryCount( mapEntryCount_ )
-      , pMapEntries( pMapEntries_ )
-      , dataSize( dataSize_ )
-      , pData( pData_ )
-    {
-    }
-
-    SpecializationInfo( VkSpecializationInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SpecializationInfo ) );
-    }
-
-    SpecializationInfo& operator=( VkSpecializationInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SpecializationInfo ) );
-      return *this;
-    }
-    SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ )
-    {
-      mapEntryCount = mapEntryCount_;
-      return *this;
-    }
-
-    SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ )
-    {
-      pMapEntries = pMapEntries_;
-      return *this;
-    }
-
-    SpecializationInfo& setDataSize( size_t dataSize_ )
-    {
-      dataSize = dataSize_;
-      return *this;
-    }
-
-    SpecializationInfo& setPData( const void* pData_ )
-    {
-      pData = pData_;
-      return *this;
-    }
-
-    operator VkSpecializationInfo const&() const
-    {
-      return *reinterpret_cast<const VkSpecializationInfo*>(this);
-    }
-
-    operator VkSpecializationInfo &()
-    {
-      return *reinterpret_cast<VkSpecializationInfo*>(this);
-    }
-
-    bool operator==( SpecializationInfo const& rhs ) const
-    {
-      return ( mapEntryCount == rhs.mapEntryCount )
-          && ( pMapEntries == rhs.pMapEntries )
-          && ( dataSize == rhs.dataSize )
-          && ( pData == rhs.pData );
-    }
-
-    bool operator!=( SpecializationInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t mapEntryCount;
-    const SpecializationMapEntry* pMapEntries;
-    size_t dataSize;
-    const void* pData;
-  };
-  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
-
-  union ClearColorValue
-  {
-    ClearColorValue( const std::array<float,4>& float32_ = { {0} } )
-    {
-      memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
-    }
-
-    ClearColorValue( const std::array<int32_t,4>& int32_ )
-    {
-      memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
-    }
-
-    ClearColorValue( const std::array<uint32_t,4>& uint32_ )
-    {
-      memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
-    }
-
-    ClearColorValue& setFloat32( std::array<float,4> float32_ )
-    {
-      memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
-      return *this;
-    }
-
-    ClearColorValue& setInt32( std::array<int32_t,4> int32_ )
-    {
-      memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
-      return *this;
-    }
-
-    ClearColorValue& setUint32( std::array<uint32_t,4> uint32_ )
-    {
-      memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
-      return *this;
-    }
-
-    operator VkClearColorValue const&() const
-    {
-      return *reinterpret_cast<const VkClearColorValue*>(this);
-    }
-
-    operator VkClearColorValue &()
-    {
-      return *reinterpret_cast<VkClearColorValue*>(this);
-    }
-
-    float float32[4];
-    int32_t int32[4];
-    uint32_t uint32[4];
-  };
-
-  struct ClearDepthStencilValue
-  {
-    ClearDepthStencilValue( float depth_ = 0,
-                            uint32_t stencil_ = 0 )
-      : depth( depth_ )
-      , stencil( stencil_ )
-    {
-    }
-
-    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
-    }
-
-    ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
-      return *this;
-    }
-    ClearDepthStencilValue& setDepth( float depth_ )
-    {
-      depth = depth_;
-      return *this;
-    }
-
-    ClearDepthStencilValue& setStencil( uint32_t stencil_ )
-    {
-      stencil = stencil_;
-      return *this;
-    }
-
-    operator VkClearDepthStencilValue const&() const
-    {
-      return *reinterpret_cast<const VkClearDepthStencilValue*>(this);
-    }
-
-    operator VkClearDepthStencilValue &()
-    {
-      return *reinterpret_cast<VkClearDepthStencilValue*>(this);
-    }
-
-    bool operator==( ClearDepthStencilValue const& rhs ) const
-    {
-      return ( depth == rhs.depth )
-          && ( stencil == rhs.stencil );
-    }
-
-    bool operator!=( ClearDepthStencilValue const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    float depth;
-    uint32_t stencil;
-  };
-  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
-
-  union ClearValue
-  {
-    ClearValue( ClearColorValue color_ = ClearColorValue() )
-    {
-      color = color_;
-    }
-
-    ClearValue( ClearDepthStencilValue depthStencil_ )
-    {
-      depthStencil = depthStencil_;
-    }
-
-    ClearValue& setColor( ClearColorValue color_ )
-    {
-      color = color_;
-      return *this;
-    }
-
-    ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ )
-    {
-      depthStencil = depthStencil_;
-      return *this;
-    }
-
-    operator VkClearValue const&() const
-    {
-      return *reinterpret_cast<const VkClearValue*>(this);
-    }
-
-    operator VkClearValue &()
-    {
-      return *reinterpret_cast<VkClearValue*>(this);
-    }
-
-#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
-    ClearColorValue color;
-    ClearDepthStencilValue depthStencil;
-#else
-    VkClearColorValue color;
-    VkClearDepthStencilValue depthStencil;
-#endif  // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
-  };
-
-  struct PhysicalDeviceFeatures
-  {
-    PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0,
-                            Bool32 fullDrawIndexUint32_ = 0,
-                            Bool32 imageCubeArray_ = 0,
-                            Bool32 independentBlend_ = 0,
-                            Bool32 geometryShader_ = 0,
-                            Bool32 tessellationShader_ = 0,
-                            Bool32 sampleRateShading_ = 0,
-                            Bool32 dualSrcBlend_ = 0,
-                            Bool32 logicOp_ = 0,
-                            Bool32 multiDrawIndirect_ = 0,
-                            Bool32 drawIndirectFirstInstance_ = 0,
-                            Bool32 depthClamp_ = 0,
-                            Bool32 depthBiasClamp_ = 0,
-                            Bool32 fillModeNonSolid_ = 0,
-                            Bool32 depthBounds_ = 0,
-                            Bool32 wideLines_ = 0,
-                            Bool32 largePoints_ = 0,
-                            Bool32 alphaToOne_ = 0,
-                            Bool32 multiViewport_ = 0,
-                            Bool32 samplerAnisotropy_ = 0,
-                            Bool32 textureCompressionETC2_ = 0,
-                            Bool32 textureCompressionASTC_LDR_ = 0,
-                            Bool32 textureCompressionBC_ = 0,
-                            Bool32 occlusionQueryPrecise_ = 0,
-                            Bool32 pipelineStatisticsQuery_ = 0,
-                            Bool32 vertexPipelineStoresAndAtomics_ = 0,
-                            Bool32 fragmentStoresAndAtomics_ = 0,
-                            Bool32 shaderTessellationAndGeometryPointSize_ = 0,
-                            Bool32 shaderImageGatherExtended_ = 0,
-                            Bool32 shaderStorageImageExtendedFormats_ = 0,
-                            Bool32 shaderStorageImageMultisample_ = 0,
-                            Bool32 shaderStorageImageReadWithoutFormat_ = 0,
-                            Bool32 shaderStorageImageWriteWithoutFormat_ = 0,
-                            Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0,
-                            Bool32 shaderSampledImageArrayDynamicIndexing_ = 0,
-                            Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0,
-                            Bool32 shaderStorageImageArrayDynamicIndexing_ = 0,
-                            Bool32 shaderClipDistance_ = 0,
-                            Bool32 shaderCullDistance_ = 0,
-                            Bool32 shaderFloat64_ = 0,
-                            Bool32 shaderInt64_ = 0,
-                            Bool32 shaderInt16_ = 0,
-                            Bool32 shaderResourceResidency_ = 0,
-                            Bool32 shaderResourceMinLod_ = 0,
-                            Bool32 sparseBinding_ = 0,
-                            Bool32 sparseResidencyBuffer_ = 0,
-                            Bool32 sparseResidencyImage2D_ = 0,
-                            Bool32 sparseResidencyImage3D_ = 0,
-                            Bool32 sparseResidency2Samples_ = 0,
-                            Bool32 sparseResidency4Samples_ = 0,
-                            Bool32 sparseResidency8Samples_ = 0,
-                            Bool32 sparseResidency16Samples_ = 0,
-                            Bool32 sparseResidencyAliased_ = 0,
-                            Bool32 variableMultisampleRate_ = 0,
-                            Bool32 inheritedQueries_ = 0 )
-      : robustBufferAccess( robustBufferAccess_ )
-      , fullDrawIndexUint32( fullDrawIndexUint32_ )
-      , imageCubeArray( imageCubeArray_ )
-      , independentBlend( independentBlend_ )
-      , geometryShader( geometryShader_ )
-      , tessellationShader( tessellationShader_ )
-      , sampleRateShading( sampleRateShading_ )
-      , dualSrcBlend( dualSrcBlend_ )
-      , logicOp( logicOp_ )
-      , multiDrawIndirect( multiDrawIndirect_ )
-      , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
-      , depthClamp( depthClamp_ )
-      , depthBiasClamp( depthBiasClamp_ )
-      , fillModeNonSolid( fillModeNonSolid_ )
-      , depthBounds( depthBounds_ )
-      , wideLines( wideLines_ )
-      , largePoints( largePoints_ )
-      , alphaToOne( alphaToOne_ )
-      , multiViewport( multiViewport_ )
-      , samplerAnisotropy( samplerAnisotropy_ )
-      , textureCompressionETC2( textureCompressionETC2_ )
-      , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
-      , textureCompressionBC( textureCompressionBC_ )
-      , occlusionQueryPrecise( occlusionQueryPrecise_ )
-      , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
-      , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
-      , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
-      , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
-      , shaderImageGatherExtended( shaderImageGatherExtended_ )
-      , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
-      , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
-      , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
-      , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
-      , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
-      , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
-      , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
-      , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
-      , shaderClipDistance( shaderClipDistance_ )
-      , shaderCullDistance( shaderCullDistance_ )
-      , shaderFloat64( shaderFloat64_ )
-      , shaderInt64( shaderInt64_ )
-      , shaderInt16( shaderInt16_ )
-      , shaderResourceResidency( shaderResourceResidency_ )
-      , shaderResourceMinLod( shaderResourceMinLod_ )
-      , sparseBinding( sparseBinding_ )
-      , sparseResidencyBuffer( sparseResidencyBuffer_ )
-      , sparseResidencyImage2D( sparseResidencyImage2D_ )
-      , sparseResidencyImage3D( sparseResidencyImage3D_ )
-      , sparseResidency2Samples( sparseResidency2Samples_ )
-      , sparseResidency4Samples( sparseResidency4Samples_ )
-      , sparseResidency8Samples( sparseResidency8Samples_ )
-      , sparseResidency16Samples( sparseResidency16Samples_ )
-      , sparseResidencyAliased( sparseResidencyAliased_ )
-      , variableMultisampleRate( variableMultisampleRate_ )
-      , inheritedQueries( inheritedQueries_ )
-    {
-    }
-
-    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) );
-    }
-
-    PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) );
-      return *this;
-    }
-    PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ )
-    {
-      robustBufferAccess = robustBufferAccess_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ )
-    {
-      fullDrawIndexUint32 = fullDrawIndexUint32_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ )
-    {
-      imageCubeArray = imageCubeArray_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ )
-    {
-      independentBlend = independentBlend_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ )
-    {
-      geometryShader = geometryShader_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ )
-    {
-      tessellationShader = tessellationShader_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ )
-    {
-      sampleRateShading = sampleRateShading_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setDualSrcBlend( Bool32 dualSrcBlend_ )
-    {
-      dualSrcBlend = dualSrcBlend_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ )
-    {
-      logicOp = logicOp_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ )
-    {
-      multiDrawIndirect = multiDrawIndirect_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ )
-    {
-      drawIndirectFirstInstance = drawIndirectFirstInstance_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ )
-    {
-      depthClamp = depthClamp_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ )
-    {
-      depthBiasClamp = depthBiasClamp_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ )
-    {
-      fillModeNonSolid = fillModeNonSolid_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ )
-    {
-      depthBounds = depthBounds_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ )
-    {
-      wideLines = wideLines_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ )
-    {
-      largePoints = largePoints_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ )
-    {
-      alphaToOne = alphaToOne_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ )
-    {
-      multiViewport = multiViewport_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ )
-    {
-      samplerAnisotropy = samplerAnisotropy_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ )
-    {
-      textureCompressionETC2 = textureCompressionETC2_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ )
-    {
-      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ )
-    {
-      textureCompressionBC = textureCompressionBC_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ )
-    {
-      occlusionQueryPrecise = occlusionQueryPrecise_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ )
-    {
-      pipelineStatisticsQuery = pipelineStatisticsQuery_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ )
-    {
-      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ )
-    {
-      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ )
-    {
-      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ )
-    {
-      shaderImageGatherExtended = shaderImageGatherExtended_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ )
-    {
-      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ )
-    {
-      shaderStorageImageMultisample = shaderStorageImageMultisample_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ )
-    {
-      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ )
-    {
-      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ )
-    {
-      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ )
-    {
-      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ )
-    {
-      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ )
-    {
-      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ )
-    {
-      shaderClipDistance = shaderClipDistance_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ )
-    {
-      shaderCullDistance = shaderCullDistance_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ )
-    {
-      shaderFloat64 = shaderFloat64_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ )
-    {
-      shaderInt64 = shaderInt64_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ )
-    {
-      shaderInt16 = shaderInt16_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ )
-    {
-      shaderResourceResidency = shaderResourceResidency_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ )
-    {
-      shaderResourceMinLod = shaderResourceMinLod_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ )
-    {
-      sparseBinding = sparseBinding_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ )
-    {
-      sparseResidencyBuffer = sparseResidencyBuffer_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ )
-    {
-      sparseResidencyImage2D = sparseResidencyImage2D_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ )
-    {
-      sparseResidencyImage3D = sparseResidencyImage3D_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ )
-    {
-      sparseResidency2Samples = sparseResidency2Samples_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ )
-    {
-      sparseResidency4Samples = sparseResidency4Samples_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ )
-    {
-      sparseResidency8Samples = sparseResidency8Samples_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ )
-    {
-      sparseResidency16Samples = sparseResidency16Samples_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ )
-    {
-      sparseResidencyAliased = sparseResidencyAliased_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ )
-    {
-      variableMultisampleRate = variableMultisampleRate_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ )
-    {
-      inheritedQueries = inheritedQueries_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceFeatures const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>(this);
-    }
-
-    operator VkPhysicalDeviceFeatures &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceFeatures*>(this);
-    }
-
-    bool operator==( PhysicalDeviceFeatures const& rhs ) const
-    {
-      return ( robustBufferAccess == rhs.robustBufferAccess )
-          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
-          && ( imageCubeArray == rhs.imageCubeArray )
-          && ( independentBlend == rhs.independentBlend )
-          && ( geometryShader == rhs.geometryShader )
-          && ( tessellationShader == rhs.tessellationShader )
-          && ( sampleRateShading == rhs.sampleRateShading )
-          && ( dualSrcBlend == rhs.dualSrcBlend )
-          && ( logicOp == rhs.logicOp )
-          && ( multiDrawIndirect == rhs.multiDrawIndirect )
-          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
-          && ( depthClamp == rhs.depthClamp )
-          && ( depthBiasClamp == rhs.depthBiasClamp )
-          && ( fillModeNonSolid == rhs.fillModeNonSolid )
-          && ( depthBounds == rhs.depthBounds )
-          && ( wideLines == rhs.wideLines )
-          && ( largePoints == rhs.largePoints )
-          && ( alphaToOne == rhs.alphaToOne )
-          && ( multiViewport == rhs.multiViewport )
-          && ( samplerAnisotropy == rhs.samplerAnisotropy )
-          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
-          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
-          && ( textureCompressionBC == rhs.textureCompressionBC )
-          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
-          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
-          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
-          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
-          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
-          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
-          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
-          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
-          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
-          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
-          && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
-          && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
-          && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
-          && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
-          && ( shaderClipDistance == rhs.shaderClipDistance )
-          && ( shaderCullDistance == rhs.shaderCullDistance )
-          && ( shaderFloat64 == rhs.shaderFloat64 )
-          && ( shaderInt64 == rhs.shaderInt64 )
-          && ( shaderInt16 == rhs.shaderInt16 )
-          && ( shaderResourceResidency == rhs.shaderResourceResidency )
-          && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
-          && ( sparseBinding == rhs.sparseBinding )
-          && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
-          && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
-          && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
-          && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
-          && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
-          && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
-          && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
-          && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
-          && ( variableMultisampleRate == rhs.variableMultisampleRate )
-          && ( inheritedQueries == rhs.inheritedQueries );
-    }
-
-    bool operator!=( PhysicalDeviceFeatures const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Bool32 robustBufferAccess;
-    Bool32 fullDrawIndexUint32;
-    Bool32 imageCubeArray;
-    Bool32 independentBlend;
-    Bool32 geometryShader;
-    Bool32 tessellationShader;
-    Bool32 sampleRateShading;
-    Bool32 dualSrcBlend;
-    Bool32 logicOp;
-    Bool32 multiDrawIndirect;
-    Bool32 drawIndirectFirstInstance;
-    Bool32 depthClamp;
-    Bool32 depthBiasClamp;
-    Bool32 fillModeNonSolid;
-    Bool32 depthBounds;
-    Bool32 wideLines;
-    Bool32 largePoints;
-    Bool32 alphaToOne;
-    Bool32 multiViewport;
-    Bool32 samplerAnisotropy;
-    Bool32 textureCompressionETC2;
-    Bool32 textureCompressionASTC_LDR;
-    Bool32 textureCompressionBC;
-    Bool32 occlusionQueryPrecise;
-    Bool32 pipelineStatisticsQuery;
-    Bool32 vertexPipelineStoresAndAtomics;
-    Bool32 fragmentStoresAndAtomics;
-    Bool32 shaderTessellationAndGeometryPointSize;
-    Bool32 shaderImageGatherExtended;
-    Bool32 shaderStorageImageExtendedFormats;
-    Bool32 shaderStorageImageMultisample;
-    Bool32 shaderStorageImageReadWithoutFormat;
-    Bool32 shaderStorageImageWriteWithoutFormat;
-    Bool32 shaderUniformBufferArrayDynamicIndexing;
-    Bool32 shaderSampledImageArrayDynamicIndexing;
-    Bool32 shaderStorageBufferArrayDynamicIndexing;
-    Bool32 shaderStorageImageArrayDynamicIndexing;
-    Bool32 shaderClipDistance;
-    Bool32 shaderCullDistance;
-    Bool32 shaderFloat64;
-    Bool32 shaderInt64;
-    Bool32 shaderInt16;
-    Bool32 shaderResourceResidency;
-    Bool32 shaderResourceMinLod;
-    Bool32 sparseBinding;
-    Bool32 sparseResidencyBuffer;
-    Bool32 sparseResidencyImage2D;
-    Bool32 sparseResidencyImage3D;
-    Bool32 sparseResidency2Samples;
-    Bool32 sparseResidency4Samples;
-    Bool32 sparseResidency8Samples;
-    Bool32 sparseResidency16Samples;
-    Bool32 sparseResidencyAliased;
-    Bool32 variableMultisampleRate;
-    Bool32 inheritedQueries;
-  };
-  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceSparseProperties
-  {
-    operator VkPhysicalDeviceSparseProperties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>(this);
-    }
-
-    operator VkPhysicalDeviceSparseProperties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const
-    {
-      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
-          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
-          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
-          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
-          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
-    }
-
-    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Bool32 residencyStandard2DBlockShape;
-    Bool32 residencyStandard2DMultisampleBlockShape;
-    Bool32 residencyStandard3DBlockShape;
-    Bool32 residencyAlignedMipSize;
-    Bool32 residencyNonResidentStrict;
-  };
-  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
-
-  struct DrawIndirectCommand
-  {
-    DrawIndirectCommand( uint32_t vertexCount_ = 0,
-                         uint32_t instanceCount_ = 0,
-                         uint32_t firstVertex_ = 0,
-                         uint32_t firstInstance_ = 0 )
-      : vertexCount( vertexCount_ )
-      , instanceCount( instanceCount_ )
-      , firstVertex( firstVertex_ )
-      , firstInstance( firstInstance_ )
-    {
-    }
-
-    DrawIndirectCommand( VkDrawIndirectCommand const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
-    }
-
-    DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
-      return *this;
-    }
-    DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ )
-    {
-      vertexCount = vertexCount_;
-      return *this;
-    }
-
-    DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
-    {
-      instanceCount = instanceCount_;
-      return *this;
-    }
-
-    DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ )
-    {
-      firstVertex = firstVertex_;
-      return *this;
-    }
-
-    DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
-    {
-      firstInstance = firstInstance_;
-      return *this;
-    }
-
-    operator VkDrawIndirectCommand const&() const
-    {
-      return *reinterpret_cast<const VkDrawIndirectCommand*>(this);
-    }
-
-    operator VkDrawIndirectCommand &()
-    {
-      return *reinterpret_cast<VkDrawIndirectCommand*>(this);
-    }
-
-    bool operator==( DrawIndirectCommand const& rhs ) const
-    {
-      return ( vertexCount == rhs.vertexCount )
-          && ( instanceCount == rhs.instanceCount )
-          && ( firstVertex == rhs.firstVertex )
-          && ( firstInstance == rhs.firstInstance );
-    }
-
-    bool operator!=( DrawIndirectCommand const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t vertexCount;
-    uint32_t instanceCount;
-    uint32_t firstVertex;
-    uint32_t firstInstance;
-  };
-  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
-
-  struct DrawIndexedIndirectCommand
-  {
-    DrawIndexedIndirectCommand( uint32_t indexCount_ = 0,
-                                uint32_t instanceCount_ = 0,
-                                uint32_t firstIndex_ = 0,
-                                int32_t vertexOffset_ = 0,
-                                uint32_t firstInstance_ = 0 )
-      : indexCount( indexCount_ )
-      , instanceCount( instanceCount_ )
-      , firstIndex( firstIndex_ )
-      , vertexOffset( vertexOffset_ )
-      , firstInstance( firstInstance_ )
-    {
-    }
-
-    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) );
-    }
-
-    DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) );
-      return *this;
-    }
-    DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ )
-    {
-      indexCount = indexCount_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
-    {
-      instanceCount = instanceCount_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ )
-    {
-      firstIndex = firstIndex_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ )
-    {
-      vertexOffset = vertexOffset_;
-      return *this;
-    }
-
-    DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
-    {
-      firstInstance = firstInstance_;
-      return *this;
-    }
-
-    operator VkDrawIndexedIndirectCommand const&() const
-    {
-      return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>(this);
-    }
-
-    operator VkDrawIndexedIndirectCommand &()
-    {
-      return *reinterpret_cast<VkDrawIndexedIndirectCommand*>(this);
-    }
-
-    bool operator==( DrawIndexedIndirectCommand const& rhs ) const
-    {
-      return ( indexCount == rhs.indexCount )
-          && ( instanceCount == rhs.instanceCount )
-          && ( firstIndex == rhs.firstIndex )
-          && ( vertexOffset == rhs.vertexOffset )
-          && ( firstInstance == rhs.firstInstance );
-    }
-
-    bool operator!=( DrawIndexedIndirectCommand const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t indexCount;
-    uint32_t instanceCount;
-    uint32_t firstIndex;
-    int32_t vertexOffset;
-    uint32_t firstInstance;
-  };
-  static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
-
-  struct DispatchIndirectCommand
-  {
-    DispatchIndirectCommand( uint32_t x_ = 0,
-                             uint32_t y_ = 0,
-                             uint32_t z_ = 0 )
-      : x( x_ )
-      , y( y_ )
-      , z( z_ )
-    {
-    }
-
-    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) );
-    }
-
-    DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) );
-      return *this;
-    }
-    DispatchIndirectCommand& setX( uint32_t x_ )
-    {
-      x = x_;
-      return *this;
-    }
-
-    DispatchIndirectCommand& setY( uint32_t y_ )
-    {
-      y = y_;
-      return *this;
-    }
-
-    DispatchIndirectCommand& setZ( uint32_t z_ )
-    {
-      z = z_;
-      return *this;
-    }
-
-    operator VkDispatchIndirectCommand const&() const
-    {
-      return *reinterpret_cast<const VkDispatchIndirectCommand*>(this);
-    }
-
-    operator VkDispatchIndirectCommand &()
-    {
-      return *reinterpret_cast<VkDispatchIndirectCommand*>(this);
-    }
-
-    bool operator==( DispatchIndirectCommand const& rhs ) const
-    {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( z == rhs.z );
-    }
-
-    bool operator!=( DispatchIndirectCommand const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t x;
-    uint32_t y;
-    uint32_t z;
-  };
-  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
-
-  struct DisplayPlanePropertiesKHR
-  {
-    operator VkDisplayPlanePropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>(this);
-    }
-
-    operator VkDisplayPlanePropertiesKHR &()
-    {
-      return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>(this);
-    }
-
-    bool operator==( DisplayPlanePropertiesKHR const& rhs ) const
-    {
-      return ( currentDisplay == rhs.currentDisplay )
-          && ( currentStackIndex == rhs.currentStackIndex );
-    }
-
-    bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DisplayKHR currentDisplay;
-    uint32_t currentStackIndex;
-  };
-  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
-
-  struct DisplayModeParametersKHR
-  {
-    DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(),
-                              uint32_t refreshRate_ = 0 )
-      : visibleRegion( visibleRegion_ )
-      , refreshRate( refreshRate_ )
-    {
-    }
-
-    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
-    }
-
-    DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
-      return *this;
-    }
-    DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ )
-    {
-      visibleRegion = visibleRegion_;
-      return *this;
-    }
-
-    DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ )
-    {
-      refreshRate = refreshRate_;
-      return *this;
-    }
-
-    operator VkDisplayModeParametersKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayModeParametersKHR*>(this);
-    }
-
-    operator VkDisplayModeParametersKHR &()
-    {
-      return *reinterpret_cast<VkDisplayModeParametersKHR*>(this);
-    }
-
-    bool operator==( DisplayModeParametersKHR const& rhs ) const
-    {
-      return ( visibleRegion == rhs.visibleRegion )
-          && ( refreshRate == rhs.refreshRate );
-    }
-
-    bool operator!=( DisplayModeParametersKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Extent2D visibleRegion;
-    uint32_t refreshRate;
-  };
-  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
-
-  struct DisplayModePropertiesKHR
-  {
-    operator VkDisplayModePropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayModePropertiesKHR*>(this);
-    }
-
-    operator VkDisplayModePropertiesKHR &()
-    {
-      return *reinterpret_cast<VkDisplayModePropertiesKHR*>(this);
-    }
-
-    bool operator==( DisplayModePropertiesKHR const& rhs ) const
-    {
-      return ( displayMode == rhs.displayMode )
-          && ( parameters == rhs.parameters );
-    }
-
-    bool operator!=( DisplayModePropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DisplayModeKHR displayMode;
-    DisplayModeParametersKHR parameters;
-  };
-  static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
-
-  struct ConformanceVersionKHR
-  {
-    ConformanceVersionKHR( uint8_t major_ = 0,
-                           uint8_t minor_ = 0,
-                           uint8_t subminor_ = 0,
-                           uint8_t patch_ = 0 )
-      : major( major_ )
-      , minor( minor_ )
-      , subminor( subminor_ )
-      , patch( patch_ )
-    {
-    }
-
-    ConformanceVersionKHR( VkConformanceVersionKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ConformanceVersionKHR ) );
-    }
-
-    ConformanceVersionKHR& operator=( VkConformanceVersionKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ConformanceVersionKHR ) );
-      return *this;
-    }
-    ConformanceVersionKHR& setMajor( uint8_t major_ )
-    {
-      major = major_;
-      return *this;
-    }
-
-    ConformanceVersionKHR& setMinor( uint8_t minor_ )
-    {
-      minor = minor_;
-      return *this;
-    }
-
-    ConformanceVersionKHR& setSubminor( uint8_t subminor_ )
-    {
-      subminor = subminor_;
-      return *this;
-    }
-
-    ConformanceVersionKHR& setPatch( uint8_t patch_ )
-    {
-      patch = patch_;
-      return *this;
-    }
-
-    operator VkConformanceVersionKHR const&() const
-    {
-      return *reinterpret_cast<const VkConformanceVersionKHR*>(this);
-    }
-
-    operator VkConformanceVersionKHR &()
-    {
-      return *reinterpret_cast<VkConformanceVersionKHR*>(this);
-    }
-
-    bool operator==( ConformanceVersionKHR const& rhs ) const
-    {
-      return ( major == rhs.major )
-          && ( minor == rhs.minor )
-          && ( subminor == rhs.subminor )
-          && ( patch == rhs.patch );
-    }
-
-    bool operator!=( ConformanceVersionKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint8_t major;
-    uint8_t minor;
-    uint8_t subminor;
-    uint8_t patch;
-  };
-  static_assert( sizeof( ConformanceVersionKHR ) == sizeof( VkConformanceVersionKHR ), "struct and wrapper have different size!" );
-
-  struct RectLayerKHR
-  {
-    RectLayerKHR( Offset2D offset_ = Offset2D(),
-                  Extent2D extent_ = Extent2D(),
-                  uint32_t layer_ = 0 )
-      : offset( offset_ )
-      , extent( extent_ )
-      , layer( layer_ )
-    {
-    }
-
-    explicit RectLayerKHR( Rect2D const& rect2D,
-                           uint32_t layer_ = 0 )
-      : offset( rect2D.offset )
-      , extent( rect2D.extent )
-      , layer( layer_ )
-    {}
-
-    RectLayerKHR( VkRectLayerKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RectLayerKHR ) );
-    }
-
-    RectLayerKHR& operator=( VkRectLayerKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RectLayerKHR ) );
-      return *this;
-    }
-    RectLayerKHR& setOffset( Offset2D offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    RectLayerKHR& setExtent( Extent2D extent_ )
-    {
-      extent = extent_;
-      return *this;
-    }
-
-    RectLayerKHR& setLayer( uint32_t layer_ )
-    {
-      layer = layer_;
-      return *this;
-    }
-
-    operator VkRectLayerKHR const&() const
-    {
-      return *reinterpret_cast<const VkRectLayerKHR*>(this);
-    }
-
-    operator VkRectLayerKHR &()
-    {
-      return *reinterpret_cast<VkRectLayerKHR*>(this);
-    }
-
-    bool operator==( RectLayerKHR const& rhs ) const
-    {
-      return ( offset == rhs.offset )
-          && ( extent == rhs.extent )
-          && ( layer == rhs.layer );
-    }
-
-    bool operator!=( RectLayerKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Offset2D offset;
-    Extent2D extent;
-    uint32_t layer;
-  };
-  static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
-
-  struct PresentRegionKHR
-  {
-    PresentRegionKHR( uint32_t rectangleCount_ = 0,
-                      const RectLayerKHR* pRectangles_ = nullptr )
-      : rectangleCount( rectangleCount_ )
-      , pRectangles( pRectangles_ )
-    {
-    }
-
-    PresentRegionKHR( VkPresentRegionKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentRegionKHR ) );
-    }
-
-    PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentRegionKHR ) );
-      return *this;
-    }
-    PresentRegionKHR& setRectangleCount( uint32_t rectangleCount_ )
-    {
-      rectangleCount = rectangleCount_;
-      return *this;
-    }
-
-    PresentRegionKHR& setPRectangles( const RectLayerKHR* pRectangles_ )
-    {
-      pRectangles = pRectangles_;
-      return *this;
-    }
-
-    operator VkPresentRegionKHR const&() const
-    {
-      return *reinterpret_cast<const VkPresentRegionKHR*>(this);
-    }
-
-    operator VkPresentRegionKHR &()
-    {
-      return *reinterpret_cast<VkPresentRegionKHR*>(this);
-    }
-
-    bool operator==( PresentRegionKHR const& rhs ) const
-    {
-      return ( rectangleCount == rhs.rectangleCount )
-          && ( pRectangles == rhs.pRectangles );
-    }
-
-    bool operator!=( PresentRegionKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t rectangleCount;
-    const RectLayerKHR* pRectangles;
-  };
-  static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
-
-  struct XYColorEXT
-  {
-    XYColorEXT( float x_ = 0,
-                float y_ = 0 )
-      : x( x_ )
-      , y( y_ )
-    {
-    }
-
-    XYColorEXT( VkXYColorEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( XYColorEXT ) );
-    }
-
-    XYColorEXT& operator=( VkXYColorEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( XYColorEXT ) );
-      return *this;
-    }
-    XYColorEXT& setX( float x_ )
-    {
-      x = x_;
-      return *this;
-    }
-
-    XYColorEXT& setY( float y_ )
-    {
-      y = y_;
-      return *this;
-    }
-
-    operator VkXYColorEXT const&() const
-    {
-      return *reinterpret_cast<const VkXYColorEXT*>(this);
-    }
-
-    operator VkXYColorEXT &()
-    {
-      return *reinterpret_cast<VkXYColorEXT*>(this);
-    }
-
-    bool operator==( XYColorEXT const& rhs ) const
-    {
-      return ( x == rhs.x )
-          && ( y == rhs.y );
-    }
-
-    bool operator!=( XYColorEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    float x;
-    float y;
-  };
-  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
-
-  struct RefreshCycleDurationGOOGLE
-  {
-    operator VkRefreshCycleDurationGOOGLE const&() const
-    {
-      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>(this);
-    }
-
-    operator VkRefreshCycleDurationGOOGLE &()
-    {
-      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>(this);
-    }
-
-    bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const
-    {
-      return ( refreshDuration == rhs.refreshDuration );
-    }
-
-    bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint64_t refreshDuration;
-  };
-  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
-
-  struct PastPresentationTimingGOOGLE
-  {
-    operator VkPastPresentationTimingGOOGLE const&() const
-    {
-      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>(this);
-    }
-
-    operator VkPastPresentationTimingGOOGLE &()
-    {
-      return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>(this);
-    }
-
-    bool operator==( PastPresentationTimingGOOGLE const& rhs ) const
-    {
-      return ( presentID == rhs.presentID )
-          && ( desiredPresentTime == rhs.desiredPresentTime )
-          && ( actualPresentTime == rhs.actualPresentTime )
-          && ( earliestPresentTime == rhs.earliestPresentTime )
-          && ( presentMargin == rhs.presentMargin );
-    }
-
-    bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t presentID;
-    uint64_t desiredPresentTime;
-    uint64_t actualPresentTime;
-    uint64_t earliestPresentTime;
-    uint64_t presentMargin;
-  };
-  static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
-
-  struct PresentTimeGOOGLE
-  {
-    PresentTimeGOOGLE( uint32_t presentID_ = 0,
-                       uint64_t desiredPresentTime_ = 0 )
-      : presentID( presentID_ )
-      , desiredPresentTime( desiredPresentTime_ )
-    {
-    }
-
-    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) );
-    }
-
-    PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) );
-      return *this;
-    }
-    PresentTimeGOOGLE& setPresentID( uint32_t presentID_ )
-    {
-      presentID = presentID_;
-      return *this;
-    }
-
-    PresentTimeGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ )
-    {
-      desiredPresentTime = desiredPresentTime_;
-      return *this;
-    }
-
-    operator VkPresentTimeGOOGLE const&() const
-    {
-      return *reinterpret_cast<const VkPresentTimeGOOGLE*>(this);
-    }
-
-    operator VkPresentTimeGOOGLE &()
-    {
-      return *reinterpret_cast<VkPresentTimeGOOGLE*>(this);
-    }
-
-    bool operator==( PresentTimeGOOGLE const& rhs ) const
-    {
-      return ( presentID == rhs.presentID )
-          && ( desiredPresentTime == rhs.desiredPresentTime );
-    }
-
-    bool operator!=( PresentTimeGOOGLE const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t presentID;
-    uint64_t desiredPresentTime;
-  };
-  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
-
-  struct ViewportWScalingNV
-  {
-    ViewportWScalingNV( float xcoeff_ = 0,
-                        float ycoeff_ = 0 )
-      : xcoeff( xcoeff_ )
-      , ycoeff( ycoeff_ )
-    {
-    }
-
-    ViewportWScalingNV( VkViewportWScalingNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ViewportWScalingNV ) );
-    }
-
-    ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ViewportWScalingNV ) );
-      return *this;
-    }
-    ViewportWScalingNV& setXcoeff( float xcoeff_ )
-    {
-      xcoeff = xcoeff_;
-      return *this;
-    }
-
-    ViewportWScalingNV& setYcoeff( float ycoeff_ )
-    {
-      ycoeff = ycoeff_;
-      return *this;
-    }
-
-    operator VkViewportWScalingNV const&() const
-    {
-      return *reinterpret_cast<const VkViewportWScalingNV*>(this);
-    }
-
-    operator VkViewportWScalingNV &()
-    {
-      return *reinterpret_cast<VkViewportWScalingNV*>(this);
-    }
-
-    bool operator==( ViewportWScalingNV const& rhs ) const
-    {
-      return ( xcoeff == rhs.xcoeff )
-          && ( ycoeff == rhs.ycoeff );
-    }
-
-    bool operator!=( ViewportWScalingNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    float xcoeff;
-    float ycoeff;
-  };
-  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
-
-  struct SampleLocationEXT
-  {
-    SampleLocationEXT( float x_ = 0,
-                       float y_ = 0 )
-      : x( x_ )
-      , y( y_ )
-    {
-    }
-
-    SampleLocationEXT( VkSampleLocationEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SampleLocationEXT ) );
-    }
-
-    SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SampleLocationEXT ) );
-      return *this;
-    }
-    SampleLocationEXT& setX( float x_ )
-    {
-      x = x_;
-      return *this;
-    }
-
-    SampleLocationEXT& setY( float y_ )
-    {
-      y = y_;
-      return *this;
-    }
-
-    operator VkSampleLocationEXT const&() const
-    {
-      return *reinterpret_cast<const VkSampleLocationEXT*>(this);
-    }
-
-    operator VkSampleLocationEXT &()
-    {
-      return *reinterpret_cast<VkSampleLocationEXT*>(this);
-    }
-
-    bool operator==( SampleLocationEXT const& rhs ) const
-    {
-      return ( x == rhs.x )
-          && ( y == rhs.y );
-    }
-
-    bool operator!=( SampleLocationEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    float x;
-    float y;
-  };
-  static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
-
-  struct ShaderResourceUsageAMD
-  {
-    operator VkShaderResourceUsageAMD const&() const
-    {
-      return *reinterpret_cast<const VkShaderResourceUsageAMD*>(this);
-    }
-
-    operator VkShaderResourceUsageAMD &()
-    {
-      return *reinterpret_cast<VkShaderResourceUsageAMD*>(this);
-    }
-
-    bool operator==( ShaderResourceUsageAMD const& rhs ) const
-    {
-      return ( numUsedVgprs == rhs.numUsedVgprs )
-          && ( numUsedSgprs == rhs.numUsedSgprs )
-          && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
-          && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
-          && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
-    }
-
-    bool operator!=( ShaderResourceUsageAMD const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t numUsedVgprs;
-    uint32_t numUsedSgprs;
-    uint32_t ldsSizePerLocalWorkGroup;
-    size_t ldsUsageSizeInBytes;
-    size_t scratchMemUsageInBytes;
-  };
-  static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
-
-  struct VertexInputBindingDivisorDescriptionEXT
-  {
-    VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = 0,
-                                             uint32_t divisor_ = 0 )
-      : binding( binding_ )
-      , divisor( divisor_ )
-    {
-    }
-
-    VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
-    }
-
-    VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
-      return *this;
-    }
-    VertexInputBindingDivisorDescriptionEXT& setBinding( uint32_t binding_ )
-    {
-      binding = binding_;
-      return *this;
-    }
-
-    VertexInputBindingDivisorDescriptionEXT& setDivisor( uint32_t divisor_ )
-    {
-      divisor = divisor_;
-      return *this;
-    }
-
-    operator VkVertexInputBindingDivisorDescriptionEXT const&() const
-    {
-      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>(this);
-    }
-
-    operator VkVertexInputBindingDivisorDescriptionEXT &()
-    {
-      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>(this);
-    }
-
-    bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const
-    {
-      return ( binding == rhs.binding )
-          && ( divisor == rhs.divisor );
-    }
-
-    bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t binding;
-    uint32_t divisor;
-  };
-  static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
-
-  struct CoarseSampleLocationNV
-  {
-    CoarseSampleLocationNV( uint32_t pixelX_ = 0,
-                            uint32_t pixelY_ = 0,
-                            uint32_t sample_ = 0 )
-      : pixelX( pixelX_ )
-      , pixelY( pixelY_ )
-      , sample( sample_ )
-    {
-    }
-
-    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CoarseSampleLocationNV ) );
-    }
-
-    CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CoarseSampleLocationNV ) );
-      return *this;
-    }
-    CoarseSampleLocationNV& setPixelX( uint32_t pixelX_ )
-    {
-      pixelX = pixelX_;
-      return *this;
-    }
-
-    CoarseSampleLocationNV& setPixelY( uint32_t pixelY_ )
-    {
-      pixelY = pixelY_;
-      return *this;
-    }
-
-    CoarseSampleLocationNV& setSample( uint32_t sample_ )
-    {
-      sample = sample_;
-      return *this;
-    }
-
-    operator VkCoarseSampleLocationNV const&() const
-    {
-      return *reinterpret_cast<const VkCoarseSampleLocationNV*>(this);
-    }
-
-    operator VkCoarseSampleLocationNV &()
-    {
-      return *reinterpret_cast<VkCoarseSampleLocationNV*>(this);
-    }
-
-    bool operator==( CoarseSampleLocationNV const& rhs ) const
-    {
-      return ( pixelX == rhs.pixelX )
-          && ( pixelY == rhs.pixelY )
-          && ( sample == rhs.sample );
-    }
-
-    bool operator!=( CoarseSampleLocationNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t pixelX;
-    uint32_t pixelY;
-    uint32_t sample;
-  };
-  static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
-
-  struct DrawMeshTasksIndirectCommandNV
-  {
-    DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = 0,
-                                    uint32_t firstTask_ = 0 )
-      : taskCount( taskCount_ )
-      , firstTask( firstTask_ )
-    {
-    }
-
-    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) );
-    }
-
-    DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) );
-      return *this;
-    }
-    DrawMeshTasksIndirectCommandNV& setTaskCount( uint32_t taskCount_ )
-    {
-      taskCount = taskCount_;
-      return *this;
-    }
-
-    DrawMeshTasksIndirectCommandNV& setFirstTask( uint32_t firstTask_ )
-    {
-      firstTask = firstTask_;
-      return *this;
-    }
-
-    operator VkDrawMeshTasksIndirectCommandNV const&() const
-    {
-      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>(this);
-    }
-
-    operator VkDrawMeshTasksIndirectCommandNV &()
-    {
-      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>(this);
-    }
-
-    bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const
-    {
-      return ( taskCount == rhs.taskCount )
-          && ( firstTask == rhs.firstTask );
-    }
-
-    bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t taskCount;
-    uint32_t firstTask;
-  };
-  static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
-
-  enum class ImageLayout
-  {
-    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
-    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
-    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
-    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
-    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
-    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
-    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
-    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
-    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
-    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
-    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
-    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
-    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
-    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
-    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
-    eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV
-  };
-
-  struct DescriptorImageInfo
-  {
-    DescriptorImageInfo( Sampler sampler_ = Sampler(),
-                         ImageView imageView_ = ImageView(),
-                         ImageLayout imageLayout_ = ImageLayout::eUndefined )
-      : sampler( sampler_ )
-      , imageView( imageView_ )
-      , imageLayout( imageLayout_ )
-    {
-    }
-
-    DescriptorImageInfo( VkDescriptorImageInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorImageInfo ) );
-    }
-
-    DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorImageInfo ) );
-      return *this;
-    }
-    DescriptorImageInfo& setSampler( Sampler sampler_ )
-    {
-      sampler = sampler_;
-      return *this;
-    }
-
-    DescriptorImageInfo& setImageView( ImageView imageView_ )
-    {
-      imageView = imageView_;
-      return *this;
-    }
-
-    DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ )
-    {
-      imageLayout = imageLayout_;
-      return *this;
-    }
-
-    operator VkDescriptorImageInfo const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorImageInfo*>(this);
-    }
-
-    operator VkDescriptorImageInfo &()
-    {
-      return *reinterpret_cast<VkDescriptorImageInfo*>(this);
-    }
-
-    bool operator==( DescriptorImageInfo const& rhs ) const
-    {
-      return ( sampler == rhs.sampler )
-          && ( imageView == rhs.imageView )
-          && ( imageLayout == rhs.imageLayout );
-    }
-
-    bool operator!=( DescriptorImageInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Sampler sampler;
-    ImageView imageView;
-    ImageLayout imageLayout;
-  };
-  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
-
-  struct AttachmentReference
-  {
-    AttachmentReference( uint32_t attachment_ = 0,
-                         ImageLayout layout_ = ImageLayout::eUndefined )
-      : attachment( attachment_ )
-      , layout( layout_ )
-    {
-    }
-
-    AttachmentReference( VkAttachmentReference const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentReference ) );
-    }
-
-    AttachmentReference& operator=( VkAttachmentReference const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentReference ) );
-      return *this;
-    }
-    AttachmentReference& setAttachment( uint32_t attachment_ )
-    {
-      attachment = attachment_;
-      return *this;
-    }
-
-    AttachmentReference& setLayout( ImageLayout layout_ )
-    {
-      layout = layout_;
-      return *this;
-    }
-
-    operator VkAttachmentReference const&() const
-    {
-      return *reinterpret_cast<const VkAttachmentReference*>(this);
-    }
-
-    operator VkAttachmentReference &()
-    {
-      return *reinterpret_cast<VkAttachmentReference*>(this);
-    }
-
-    bool operator==( AttachmentReference const& rhs ) const
-    {
-      return ( attachment == rhs.attachment )
-          && ( layout == rhs.layout );
-    }
-
-    bool operator!=( AttachmentReference const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t attachment;
-    ImageLayout layout;
-  };
-  static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+  }
 
   enum class AttachmentLoadOp
   {
@@ -8048,438 +3922,34 @@
     eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
   };
 
+  VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value )
+  {
+    switch ( value )
+    {
+      case AttachmentLoadOp::eLoad : return "Load";
+      case AttachmentLoadOp::eClear : return "Clear";
+      case AttachmentLoadOp::eDontCare : return "DontCare";
+      default: return "invalid";
+    }
+  }
+
   enum class AttachmentStoreOp
   {
     eStore = VK_ATTACHMENT_STORE_OP_STORE,
-    eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
+    eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+    eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM
   };
 
-  enum class ImageType
+  VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
   {
-    e1D = VK_IMAGE_TYPE_1D,
-    e2D = VK_IMAGE_TYPE_2D,
-    e3D = VK_IMAGE_TYPE_3D
-  };
-
-  enum class ImageTiling
-  {
-    eOptimal = VK_IMAGE_TILING_OPTIMAL,
-    eLinear = VK_IMAGE_TILING_LINEAR,
-    eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
-  };
-
-  enum class ImageViewType
-  {
-    e1D = VK_IMAGE_VIEW_TYPE_1D,
-    e2D = VK_IMAGE_VIEW_TYPE_2D,
-    e3D = VK_IMAGE_VIEW_TYPE_3D,
-    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
-    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
-    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
-    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
-  };
-
-  enum class CommandBufferLevel
-  {
-    ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
-    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
-  };
-
-  enum class ComponentSwizzle
-  {
-    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
-    eZero = VK_COMPONENT_SWIZZLE_ZERO,
-    eOne = VK_COMPONENT_SWIZZLE_ONE,
-    eR = VK_COMPONENT_SWIZZLE_R,
-    eG = VK_COMPONENT_SWIZZLE_G,
-    eB = VK_COMPONENT_SWIZZLE_B,
-    eA = VK_COMPONENT_SWIZZLE_A
-  };
-
-  struct ComponentMapping
-  {
-    ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity,
-                      ComponentSwizzle g_ = ComponentSwizzle::eIdentity,
-                      ComponentSwizzle b_ = ComponentSwizzle::eIdentity,
-                      ComponentSwizzle a_ = ComponentSwizzle::eIdentity )
-      : r( r_ )
-      , g( g_ )
-      , b( b_ )
-      , a( a_ )
+    switch ( value )
     {
+      case AttachmentStoreOp::eStore : return "Store";
+      case AttachmentStoreOp::eDontCare : return "DontCare";
+      case AttachmentStoreOp::eNoneQCOM : return "NoneQCOM";
+      default: return "invalid";
     }
-
-    ComponentMapping( VkComponentMapping const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ComponentMapping ) );
-    }
-
-    ComponentMapping& operator=( VkComponentMapping const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ComponentMapping ) );
-      return *this;
-    }
-    ComponentMapping& setR( ComponentSwizzle r_ )
-    {
-      r = r_;
-      return *this;
-    }
-
-    ComponentMapping& setG( ComponentSwizzle g_ )
-    {
-      g = g_;
-      return *this;
-    }
-
-    ComponentMapping& setB( ComponentSwizzle b_ )
-    {
-      b = b_;
-      return *this;
-    }
-
-    ComponentMapping& setA( ComponentSwizzle a_ )
-    {
-      a = a_;
-      return *this;
-    }
-
-    operator VkComponentMapping const&() const
-    {
-      return *reinterpret_cast<const VkComponentMapping*>(this);
-    }
-
-    operator VkComponentMapping &()
-    {
-      return *reinterpret_cast<VkComponentMapping*>(this);
-    }
-
-    bool operator==( ComponentMapping const& rhs ) const
-    {
-      return ( r == rhs.r )
-          && ( g == rhs.g )
-          && ( b == rhs.b )
-          && ( a == rhs.a );
-    }
-
-    bool operator!=( ComponentMapping const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ComponentSwizzle r;
-    ComponentSwizzle g;
-    ComponentSwizzle b;
-    ComponentSwizzle a;
-  };
-  static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
-
-  enum class DescriptorType
-  {
-    eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
-    eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
-    eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
-    eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
-    eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
-    eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
-    eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
-    eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
-    eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
-    eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
-    eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
-    eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
-    eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV
-  };
-
-  struct DescriptorPoolSize
-  {
-    DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler,
-                        uint32_t descriptorCount_ = 0 )
-      : type( type_ )
-      , descriptorCount( descriptorCount_ )
-    {
-    }
-
-    DescriptorPoolSize( VkDescriptorPoolSize const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorPoolSize ) );
-    }
-
-    DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorPoolSize ) );
-      return *this;
-    }
-    DescriptorPoolSize& setType( DescriptorType type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ )
-    {
-      descriptorCount = descriptorCount_;
-      return *this;
-    }
-
-    operator VkDescriptorPoolSize const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorPoolSize*>(this);
-    }
-
-    operator VkDescriptorPoolSize &()
-    {
-      return *reinterpret_cast<VkDescriptorPoolSize*>(this);
-    }
-
-    bool operator==( DescriptorPoolSize const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( descriptorCount == rhs.descriptorCount );
-    }
-
-    bool operator!=( DescriptorPoolSize const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DescriptorType type;
-    uint32_t descriptorCount;
-  };
-  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
-
-  struct DescriptorUpdateTemplateEntry
-  {
-    DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = 0,
-                                   uint32_t dstArrayElement_ = 0,
-                                   uint32_t descriptorCount_ = 0,
-                                   DescriptorType descriptorType_ = DescriptorType::eSampler,
-                                   size_t offset_ = 0,
-                                   size_t stride_ = 0 )
-      : dstBinding( dstBinding_ )
-      , dstArrayElement( dstArrayElement_ )
-      , descriptorCount( descriptorCount_ )
-      , descriptorType( descriptorType_ )
-      , offset( offset_ )
-      , stride( stride_ )
-    {
-    }
-
-    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) );
-    }
-
-    DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) );
-      return *this;
-    }
-    DescriptorUpdateTemplateEntry& setDstBinding( uint32_t dstBinding_ )
-    {
-      dstBinding = dstBinding_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateEntry& setDstArrayElement( uint32_t dstArrayElement_ )
-    {
-      dstArrayElement = dstArrayElement_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateEntry& setDescriptorCount( uint32_t descriptorCount_ )
-    {
-      descriptorCount = descriptorCount_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateEntry& setDescriptorType( DescriptorType descriptorType_ )
-    {
-      descriptorType = descriptorType_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateEntry& setOffset( size_t offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateEntry& setStride( size_t stride_ )
-    {
-      stride = stride_;
-      return *this;
-    }
-
-    operator VkDescriptorUpdateTemplateEntry const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>(this);
-    }
-
-    operator VkDescriptorUpdateTemplateEntry &()
-    {
-      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>(this);
-    }
-
-    bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const
-    {
-      return ( dstBinding == rhs.dstBinding )
-          && ( dstArrayElement == rhs.dstArrayElement )
-          && ( descriptorCount == rhs.descriptorCount )
-          && ( descriptorType == rhs.descriptorType )
-          && ( offset == rhs.offset )
-          && ( stride == rhs.stride );
-    }
-
-    bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t dstBinding;
-    uint32_t dstArrayElement;
-    uint32_t descriptorCount;
-    DescriptorType descriptorType;
-    size_t offset;
-    size_t stride;
-  };
-  static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
-
-  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
-
-  enum class QueryType
-  {
-    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
-    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
-    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
-    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
-    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV
-  };
-
-  enum class BorderColor
-  {
-    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
-    eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
-    eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
-    eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
-    eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
-    eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
-  };
-
-  enum class PipelineBindPoint
-  {
-    eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
-    eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
-    eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
-  };
-
-  enum class PipelineCacheHeaderVersion
-  {
-    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
-  };
-
-  enum class PrimitiveTopology
-  {
-    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
-    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
-    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
-    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
-    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
-    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
-    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
-    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
-    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
-    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
-    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
-  };
-
-  enum class SharingMode
-  {
-    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
-    eConcurrent = VK_SHARING_MODE_CONCURRENT
-  };
-
-  enum class IndexType
-  {
-    eUint16 = VK_INDEX_TYPE_UINT16,
-    eUint32 = VK_INDEX_TYPE_UINT32,
-    eNoneNV = VK_INDEX_TYPE_NONE_NV
-  };
-
-  enum class Filter
-  {
-    eNearest = VK_FILTER_NEAREST,
-    eLinear = VK_FILTER_LINEAR,
-    eCubicIMG = VK_FILTER_CUBIC_IMG
-  };
-
-  enum class SamplerMipmapMode
-  {
-    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
-    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
-  };
-
-  enum class SamplerAddressMode
-  {
-    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
-    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
-    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
-    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
-    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
-  };
-
-  enum class CompareOp
-  {
-    eNever = VK_COMPARE_OP_NEVER,
-    eLess = VK_COMPARE_OP_LESS,
-    eEqual = VK_COMPARE_OP_EQUAL,
-    eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
-    eGreater = VK_COMPARE_OP_GREATER,
-    eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
-    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
-    eAlways = VK_COMPARE_OP_ALWAYS
-  };
-
-  enum class PolygonMode
-  {
-    eFill = VK_POLYGON_MODE_FILL,
-    eLine = VK_POLYGON_MODE_LINE,
-    ePoint = VK_POLYGON_MODE_POINT,
-    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
-  };
-
-  enum class CullModeFlagBits
-  {
-    eNone = VK_CULL_MODE_NONE,
-    eFront = VK_CULL_MODE_FRONT_BIT,
-    eBack = VK_CULL_MODE_BACK_BIT,
-    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
-  };
-
-  using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
-
-  VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
-  {
-    return CullModeFlags( bit0 ) | bit1;
   }
-
-  VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
-  {
-    return ~( CullModeFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<CullModeFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
-    };
-  };
-
-  enum class FrontFace
-  {
-    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
-    eClockwise = VK_FRONT_FACE_CLOCKWISE
-  };
 
   enum class BlendFactor
   {
@@ -8503,6 +3973,33 @@
     eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
     eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
   };
+
+  VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
+  {
+    switch ( value )
+    {
+      case BlendFactor::eZero : return "Zero";
+      case BlendFactor::eOne : return "One";
+      case BlendFactor::eSrcColor : return "SrcColor";
+      case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor";
+      case BlendFactor::eDstColor : return "DstColor";
+      case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor";
+      case BlendFactor::eSrcAlpha : return "SrcAlpha";
+      case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha";
+      case BlendFactor::eDstAlpha : return "DstAlpha";
+      case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha";
+      case BlendFactor::eConstantColor : return "ConstantColor";
+      case BlendFactor::eOneMinusConstantColor : return "OneMinusConstantColor";
+      case BlendFactor::eConstantAlpha : return "ConstantAlpha";
+      case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha";
+      case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate";
+      case BlendFactor::eSrc1Color : return "Src1Color";
+      case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color";
+      case BlendFactor::eSrc1Alpha : return "Src1Alpha";
+      case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha";
+      default: return "invalid";
+    }
+  }
 
   enum class BlendOp
   {
@@ -8559,240 +4056,1373 @@
     eBlueEXT = VK_BLEND_OP_BLUE_EXT
   };
 
-  enum class StencilOp
+  VULKAN_HPP_INLINE std::string to_string( BlendOp value )
   {
-    eKeep = VK_STENCIL_OP_KEEP,
-    eZero = VK_STENCIL_OP_ZERO,
-    eReplace = VK_STENCIL_OP_REPLACE,
-    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
-    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
-    eInvert = VK_STENCIL_OP_INVERT,
-    eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
-    eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+    switch ( value )
+    {
+      case BlendOp::eAdd : return "Add";
+      case BlendOp::eSubtract : return "Subtract";
+      case BlendOp::eReverseSubtract : return "ReverseSubtract";
+      case BlendOp::eMin : return "Min";
+      case BlendOp::eMax : return "Max";
+      case BlendOp::eZeroEXT : return "ZeroEXT";
+      case BlendOp::eSrcEXT : return "SrcEXT";
+      case BlendOp::eDstEXT : return "DstEXT";
+      case BlendOp::eSrcOverEXT : return "SrcOverEXT";
+      case BlendOp::eDstOverEXT : return "DstOverEXT";
+      case BlendOp::eSrcInEXT : return "SrcInEXT";
+      case BlendOp::eDstInEXT : return "DstInEXT";
+      case BlendOp::eSrcOutEXT : return "SrcOutEXT";
+      case BlendOp::eDstOutEXT : return "DstOutEXT";
+      case BlendOp::eSrcAtopEXT : return "SrcAtopEXT";
+      case BlendOp::eDstAtopEXT : return "DstAtopEXT";
+      case BlendOp::eXorEXT : return "XorEXT";
+      case BlendOp::eMultiplyEXT : return "MultiplyEXT";
+      case BlendOp::eScreenEXT : return "ScreenEXT";
+      case BlendOp::eOverlayEXT : return "OverlayEXT";
+      case BlendOp::eDarkenEXT : return "DarkenEXT";
+      case BlendOp::eLightenEXT : return "LightenEXT";
+      case BlendOp::eColordodgeEXT : return "ColordodgeEXT";
+      case BlendOp::eColorburnEXT : return "ColorburnEXT";
+      case BlendOp::eHardlightEXT : return "HardlightEXT";
+      case BlendOp::eSoftlightEXT : return "SoftlightEXT";
+      case BlendOp::eDifferenceEXT : return "DifferenceEXT";
+      case BlendOp::eExclusionEXT : return "ExclusionEXT";
+      case BlendOp::eInvertEXT : return "InvertEXT";
+      case BlendOp::eInvertRgbEXT : return "InvertRgbEXT";
+      case BlendOp::eLineardodgeEXT : return "LineardodgeEXT";
+      case BlendOp::eLinearburnEXT : return "LinearburnEXT";
+      case BlendOp::eVividlightEXT : return "VividlightEXT";
+      case BlendOp::eLinearlightEXT : return "LinearlightEXT";
+      case BlendOp::ePinlightEXT : return "PinlightEXT";
+      case BlendOp::eHardmixEXT : return "HardmixEXT";
+      case BlendOp::eHslHueEXT : return "HslHueEXT";
+      case BlendOp::eHslSaturationEXT : return "HslSaturationEXT";
+      case BlendOp::eHslColorEXT : return "HslColorEXT";
+      case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT";
+      case BlendOp::ePlusEXT : return "PlusEXT";
+      case BlendOp::ePlusClampedEXT : return "PlusClampedEXT";
+      case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT";
+      case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT";
+      case BlendOp::eMinusEXT : return "MinusEXT";
+      case BlendOp::eMinusClampedEXT : return "MinusClampedEXT";
+      case BlendOp::eContrastEXT : return "ContrastEXT";
+      case BlendOp::eInvertOvgEXT : return "InvertOvgEXT";
+      case BlendOp::eRedEXT : return "RedEXT";
+      case BlendOp::eGreenEXT : return "GreenEXT";
+      case BlendOp::eBlueEXT : return "BlueEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class BlendOverlapEXT
+  {
+    eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+    eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
+    eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
   };
 
-  struct StencilOpState
+  VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value )
   {
-    StencilOpState( StencilOp failOp_ = StencilOp::eKeep,
-                    StencilOp passOp_ = StencilOp::eKeep,
-                    StencilOp depthFailOp_ = StencilOp::eKeep,
-                    CompareOp compareOp_ = CompareOp::eNever,
-                    uint32_t compareMask_ = 0,
-                    uint32_t writeMask_ = 0,
-                    uint32_t reference_ = 0 )
-      : failOp( failOp_ )
-      , passOp( passOp_ )
-      , depthFailOp( depthFailOp_ )
-      , compareOp( compareOp_ )
-      , compareMask( compareMask_ )
-      , writeMask( writeMask_ )
-      , reference( reference_ )
+    switch ( value )
     {
+      case BlendOverlapEXT::eUncorrelated : return "Uncorrelated";
+      case BlendOverlapEXT::eDisjoint : return "Disjoint";
+      case BlendOverlapEXT::eConjoint : return "Conjoint";
+      default: return "invalid";
     }
+  }
 
-    StencilOpState( VkStencilOpState const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( StencilOpState ) );
-    }
-
-    StencilOpState& operator=( VkStencilOpState const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( StencilOpState ) );
-      return *this;
-    }
-    StencilOpState& setFailOp( StencilOp failOp_ )
-    {
-      failOp = failOp_;
-      return *this;
-    }
-
-    StencilOpState& setPassOp( StencilOp passOp_ )
-    {
-      passOp = passOp_;
-      return *this;
-    }
-
-    StencilOpState& setDepthFailOp( StencilOp depthFailOp_ )
-    {
-      depthFailOp = depthFailOp_;
-      return *this;
-    }
-
-    StencilOpState& setCompareOp( CompareOp compareOp_ )
-    {
-      compareOp = compareOp_;
-      return *this;
-    }
-
-    StencilOpState& setCompareMask( uint32_t compareMask_ )
-    {
-      compareMask = compareMask_;
-      return *this;
-    }
-
-    StencilOpState& setWriteMask( uint32_t writeMask_ )
-    {
-      writeMask = writeMask_;
-      return *this;
-    }
-
-    StencilOpState& setReference( uint32_t reference_ )
-    {
-      reference = reference_;
-      return *this;
-    }
-
-    operator VkStencilOpState const&() const
-    {
-      return *reinterpret_cast<const VkStencilOpState*>(this);
-    }
-
-    operator VkStencilOpState &()
-    {
-      return *reinterpret_cast<VkStencilOpState*>(this);
-    }
-
-    bool operator==( StencilOpState const& rhs ) const
-    {
-      return ( failOp == rhs.failOp )
-          && ( passOp == rhs.passOp )
-          && ( depthFailOp == rhs.depthFailOp )
-          && ( compareOp == rhs.compareOp )
-          && ( compareMask == rhs.compareMask )
-          && ( writeMask == rhs.writeMask )
-          && ( reference == rhs.reference );
-    }
-
-    bool operator!=( StencilOpState const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    StencilOp failOp;
-    StencilOp passOp;
-    StencilOp depthFailOp;
-    CompareOp compareOp;
-    uint32_t compareMask;
-    uint32_t writeMask;
-    uint32_t reference;
-  };
-  static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
-
-  enum class LogicOp
+  enum class BorderColor
   {
-    eClear = VK_LOGIC_OP_CLEAR,
-    eAnd = VK_LOGIC_OP_AND,
-    eAndReverse = VK_LOGIC_OP_AND_REVERSE,
-    eCopy = VK_LOGIC_OP_COPY,
-    eAndInverted = VK_LOGIC_OP_AND_INVERTED,
-    eNoOp = VK_LOGIC_OP_NO_OP,
-    eXor = VK_LOGIC_OP_XOR,
-    eOr = VK_LOGIC_OP_OR,
-    eNor = VK_LOGIC_OP_NOR,
-    eEquivalent = VK_LOGIC_OP_EQUIVALENT,
-    eInvert = VK_LOGIC_OP_INVERT,
-    eOrReverse = VK_LOGIC_OP_OR_REVERSE,
-    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
-    eOrInverted = VK_LOGIC_OP_OR_INVERTED,
-    eNand = VK_LOGIC_OP_NAND,
-    eSet = VK_LOGIC_OP_SET
+    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+    eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+    eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+    eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+    eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+    eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT,
+    eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT
   };
 
-  enum class InternalAllocationType
+  VULKAN_HPP_INLINE std::string to_string( BorderColor value )
   {
-    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+    switch ( value )
+    {
+      case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack";
+      case BorderColor::eIntTransparentBlack : return "IntTransparentBlack";
+      case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack";
+      case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack";
+      case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite";
+      case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite";
+      case BorderColor::eFloatCustomEXT : return "FloatCustomEXT";
+      case BorderColor::eIntCustomEXT : return "IntCustomEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class BufferCreateFlagBits : VkBufferCreateFlags
+  {
+    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+    eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
+    eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT,
+    eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
   };
 
-  enum class SystemAllocationScope
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
   {
-    eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
-    eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
-    eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
-    eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
-    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
+    switch ( value )
+    {
+      case BufferCreateFlagBits::eSparseBinding : return "SparseBinding";
+      case BufferCreateFlagBits::eSparseResidency : return "SparseResidency";
+      case BufferCreateFlagBits::eSparseAliased : return "SparseAliased";
+      case BufferCreateFlagBits::eProtected : return "Protected";
+      case BufferCreateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+      default: return "invalid";
+    }
+  }
+
+  enum class BufferUsageFlagBits : VkBufferUsageFlags
+  {
+    eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+    eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+    eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+    eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+    eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+    eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+    eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+    eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eRayTracingKHR = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR,
+    eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+    eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
+    eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
   };
 
-  enum class PhysicalDeviceType
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
   {
-    eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
-    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
-    eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
-    eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
-    eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+    switch ( value )
+    {
+      case BufferUsageFlagBits::eTransferSrc : return "TransferSrc";
+      case BufferUsageFlagBits::eTransferDst : return "TransferDst";
+      case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer";
+      case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer";
+      case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer";
+      case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer";
+      case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer";
+      case BufferUsageFlagBits::eShaderDeviceAddress : return "ShaderDeviceAddress";
+      case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT";
+      case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT";
+      case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+      case BufferUsageFlagBits::eRayTracingKHR : return "RayTracingKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
+  {
+    eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+    eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+    ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+    ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+    eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR
+  };
+  using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate";
+      case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction";
+      case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace";
+      case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild";
+      case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory";
+      default: return "invalid";
+    }
+  }
+
+  enum class ChromaLocation
+  {
+    eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
+    eMidpoint = VK_CHROMA_LOCATION_MIDPOINT
+  };
+  using ChromaLocationKHR = ChromaLocation;
+
+  VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
+  {
+    switch ( value )
+    {
+      case ChromaLocation::eCositedEven : return "CositedEven";
+      case ChromaLocation::eMidpoint : return "Midpoint";
+      default: return "invalid";
+    }
+  }
+
+  enum class CoarseSampleOrderTypeNV
+  {
+    eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
+    eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV,
+    ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV,
+    eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV
   };
 
-  enum class VertexInputRate
+  VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value )
   {
-    eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
-    eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
+    switch ( value )
+    {
+      case CoarseSampleOrderTypeNV::eDefault : return "Default";
+      case CoarseSampleOrderTypeNV::eCustom : return "Custom";
+      case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor";
+      case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor";
+      default: return "invalid";
+    }
+  }
+
+  enum class ColorComponentFlagBits : VkColorComponentFlags
+  {
+    eR = VK_COLOR_COMPONENT_R_BIT,
+    eG = VK_COLOR_COMPONENT_G_BIT,
+    eB = VK_COLOR_COMPONENT_B_BIT,
+    eA = VK_COLOR_COMPONENT_A_BIT
   };
 
-  struct VertexInputBindingDescription
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
   {
-    VertexInputBindingDescription( uint32_t binding_ = 0,
-                                   uint32_t stride_ = 0,
-                                   VertexInputRate inputRate_ = VertexInputRate::eVertex )
-      : binding( binding_ )
-      , stride( stride_ )
-      , inputRate( inputRate_ )
+    switch ( value )
     {
+      case ColorComponentFlagBits::eR : return "R";
+      case ColorComponentFlagBits::eG : return "G";
+      case ColorComponentFlagBits::eB : return "B";
+      case ColorComponentFlagBits::eA : return "A";
+      default: return "invalid";
     }
+  }
 
-    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) );
-    }
-
-    VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) );
-      return *this;
-    }
-    VertexInputBindingDescription& setBinding( uint32_t binding_ )
-    {
-      binding = binding_;
-      return *this;
-    }
-
-    VertexInputBindingDescription& setStride( uint32_t stride_ )
-    {
-      stride = stride_;
-      return *this;
-    }
-
-    VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ )
-    {
-      inputRate = inputRate_;
-      return *this;
-    }
-
-    operator VkVertexInputBindingDescription const&() const
-    {
-      return *reinterpret_cast<const VkVertexInputBindingDescription*>(this);
-    }
-
-    operator VkVertexInputBindingDescription &()
-    {
-      return *reinterpret_cast<VkVertexInputBindingDescription*>(this);
-    }
-
-    bool operator==( VertexInputBindingDescription const& rhs ) const
-    {
-      return ( binding == rhs.binding )
-          && ( stride == rhs.stride )
-          && ( inputRate == rhs.inputRate );
-    }
-
-    bool operator!=( VertexInputBindingDescription const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t binding;
-    uint32_t stride;
-    VertexInputRate inputRate;
+  enum class ColorSpaceKHR
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+    eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
+    eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD,
+    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT
   };
-  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+
+  VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
+  {
+    switch ( value )
+    {
+      case ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear";
+      case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT";
+      case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT";
+      case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT";
+      case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT";
+      case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT";
+      case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT";
+      case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT";
+      case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT";
+      case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT";
+      case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT";
+      case ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT";
+      case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT";
+      case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT";
+      case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT";
+      case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD";
+      default: return "invalid";
+    }
+  }
+
+  enum class CommandBufferLevel
+  {
+    ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
+  {
+    switch ( value )
+    {
+      case CommandBufferLevel::ePrimary : return "Primary";
+      case CommandBufferLevel::eSecondary : return "Secondary";
+      default: return "invalid";
+    }
+  }
+
+  enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
+  {
+    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources";
+      default: return "invalid";
+    }
+  }
+
+  enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
+  {
+    eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+    eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit";
+      case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue";
+      case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse";
+      default: return "invalid";
+    }
+  }
+
+  enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags
+  {
+    eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
+    eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+    eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandPoolCreateFlagBits::eTransient : return "Transient";
+      case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer";
+      case CommandPoolCreateFlagBits::eProtected : return "Protected";
+      default: return "invalid";
+    }
+  }
+
+  enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags
+  {
+    eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources";
+      default: return "invalid";
+    }
+  }
+
+  enum class CompareOp
+  {
+    eNever = VK_COMPARE_OP_NEVER,
+    eLess = VK_COMPARE_OP_LESS,
+    eEqual = VK_COMPARE_OP_EQUAL,
+    eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
+    eGreater = VK_COMPARE_OP_GREATER,
+    eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
+    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
+    eAlways = VK_COMPARE_OP_ALWAYS
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CompareOp value )
+  {
+    switch ( value )
+    {
+      case CompareOp::eNever : return "Never";
+      case CompareOp::eLess : return "Less";
+      case CompareOp::eEqual : return "Equal";
+      case CompareOp::eLessOrEqual : return "LessOrEqual";
+      case CompareOp::eGreater : return "Greater";
+      case CompareOp::eNotEqual : return "NotEqual";
+      case CompareOp::eGreaterOrEqual : return "GreaterOrEqual";
+      case CompareOp::eAlways : return "Always";
+      default: return "invalid";
+    }
+  }
+
+  enum class ComponentSwizzle
+  {
+    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
+    eZero = VK_COMPONENT_SWIZZLE_ZERO,
+    eOne = VK_COMPONENT_SWIZZLE_ONE,
+    eR = VK_COMPONENT_SWIZZLE_R,
+    eG = VK_COMPONENT_SWIZZLE_G,
+    eB = VK_COMPONENT_SWIZZLE_B,
+    eA = VK_COMPONENT_SWIZZLE_A
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
+  {
+    switch ( value )
+    {
+      case ComponentSwizzle::eIdentity : return "Identity";
+      case ComponentSwizzle::eZero : return "Zero";
+      case ComponentSwizzle::eOne : return "One";
+      case ComponentSwizzle::eR : return "R";
+      case ComponentSwizzle::eG : return "G";
+      case ComponentSwizzle::eB : return "B";
+      case ComponentSwizzle::eA : return "A";
+      default: return "invalid";
+    }
+  }
+
+  enum class ComponentTypeNV
+  {
+    eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
+    eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV,
+    eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV,
+    eSint8 = VK_COMPONENT_TYPE_SINT8_NV,
+    eSint16 = VK_COMPONENT_TYPE_SINT16_NV,
+    eSint32 = VK_COMPONENT_TYPE_SINT32_NV,
+    eSint64 = VK_COMPONENT_TYPE_SINT64_NV,
+    eUint8 = VK_COMPONENT_TYPE_UINT8_NV,
+    eUint16 = VK_COMPONENT_TYPE_UINT16_NV,
+    eUint32 = VK_COMPONENT_TYPE_UINT32_NV,
+    eUint64 = VK_COMPONENT_TYPE_UINT64_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
+  {
+    switch ( value )
+    {
+      case ComponentTypeNV::eFloat16 : return "Float16";
+      case ComponentTypeNV::eFloat32 : return "Float32";
+      case ComponentTypeNV::eFloat64 : return "Float64";
+      case ComponentTypeNV::eSint8 : return "Sint8";
+      case ComponentTypeNV::eSint16 : return "Sint16";
+      case ComponentTypeNV::eSint32 : return "Sint32";
+      case ComponentTypeNV::eSint64 : return "Sint64";
+      case ComponentTypeNV::eUint8 : return "Uint8";
+      case ComponentTypeNV::eUint16 : return "Uint16";
+      case ComponentTypeNV::eUint32 : return "Uint32";
+      case ComponentTypeNV::eUint64 : return "Uint64";
+      default: return "invalid";
+    }
+  }
+
+  enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
+  {
+    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque";
+      case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied";
+      case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied";
+      case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit";
+      default: return "invalid";
+    }
+  }
+
+  enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
+  {
+    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted";
+      default: return "invalid";
+    }
+  }
+
+  enum class ConservativeRasterizationModeEXT
+  {
+    eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+    eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
+    eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case ConservativeRasterizationModeEXT::eDisabled : return "Disabled";
+      case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate";
+      case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate";
+      default: return "invalid";
+    }
+  }
+
+  enum class CopyAccelerationStructureModeKHR
+  {
+    eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+    eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+    eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
+    eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR
+  };
+  using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
+  {
+    switch ( value )
+    {
+      case CopyAccelerationStructureModeKHR::eClone : return "Clone";
+      case CopyAccelerationStructureModeKHR::eCompact : return "Compact";
+      case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize";
+      case CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize";
+      default: return "invalid";
+    }
+  }
+
+  enum class CoverageModulationModeNV
+  {
+    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageModulationModeNV::eNone : return "None";
+      case CoverageModulationModeNV::eRgb : return "Rgb";
+      case CoverageModulationModeNV::eAlpha : return "Alpha";
+      case CoverageModulationModeNV::eRgba : return "Rgba";
+      default: return "invalid";
+    }
+  }
+
+  enum class CoverageReductionModeNV
+  {
+    eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
+    eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageReductionModeNV::eMerge : return "Merge";
+      case CoverageReductionModeNV::eTruncate : return "Truncate";
+      default: return "invalid";
+    }
+  }
+
+  enum class CullModeFlagBits : VkCullModeFlags
+  {
+    eNone = VK_CULL_MODE_NONE,
+    eFront = VK_CULL_MODE_FRONT_BIT,
+    eBack = VK_CULL_MODE_BACK_BIT,
+    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case CullModeFlagBits::eNone : return "None";
+      case CullModeFlagBits::eFront : return "Front";
+      case CullModeFlagBits::eBack : return "Back";
+      case CullModeFlagBits::eFrontAndBack : return "FrontAndBack";
+      default: return "invalid";
+    }
+  }
+
+  enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportFlagBitsEXT::eInformation : return "Information";
+      case DebugReportFlagBitsEXT::eWarning : return "Warning";
+      case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning";
+      case DebugReportFlagBitsEXT::eError : return "Error";
+      case DebugReportFlagBitsEXT::eDebug : return "Debug";
+      default: return "invalid";
+    }
+  }
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
+    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
+    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
+    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportObjectTypeEXT::eUnknown : return "Unknown";
+      case DebugReportObjectTypeEXT::eInstance : return "Instance";
+      case DebugReportObjectTypeEXT::ePhysicalDevice : return "PhysicalDevice";
+      case DebugReportObjectTypeEXT::eDevice : return "Device";
+      case DebugReportObjectTypeEXT::eQueue : return "Queue";
+      case DebugReportObjectTypeEXT::eSemaphore : return "Semaphore";
+      case DebugReportObjectTypeEXT::eCommandBuffer : return "CommandBuffer";
+      case DebugReportObjectTypeEXT::eFence : return "Fence";
+      case DebugReportObjectTypeEXT::eDeviceMemory : return "DeviceMemory";
+      case DebugReportObjectTypeEXT::eBuffer : return "Buffer";
+      case DebugReportObjectTypeEXT::eImage : return "Image";
+      case DebugReportObjectTypeEXT::eEvent : return "Event";
+      case DebugReportObjectTypeEXT::eQueryPool : return "QueryPool";
+      case DebugReportObjectTypeEXT::eBufferView : return "BufferView";
+      case DebugReportObjectTypeEXT::eImageView : return "ImageView";
+      case DebugReportObjectTypeEXT::eShaderModule : return "ShaderModule";
+      case DebugReportObjectTypeEXT::ePipelineCache : return "PipelineCache";
+      case DebugReportObjectTypeEXT::ePipelineLayout : return "PipelineLayout";
+      case DebugReportObjectTypeEXT::eRenderPass : return "RenderPass";
+      case DebugReportObjectTypeEXT::ePipeline : return "Pipeline";
+      case DebugReportObjectTypeEXT::eDescriptorSetLayout : return "DescriptorSetLayout";
+      case DebugReportObjectTypeEXT::eSampler : return "Sampler";
+      case DebugReportObjectTypeEXT::eDescriptorPool : return "DescriptorPool";
+      case DebugReportObjectTypeEXT::eDescriptorSet : return "DescriptorSet";
+      case DebugReportObjectTypeEXT::eFramebuffer : return "Framebuffer";
+      case DebugReportObjectTypeEXT::eCommandPool : return "CommandPool";
+      case DebugReportObjectTypeEXT::eSurfaceKHR : return "SurfaceKHR";
+      case DebugReportObjectTypeEXT::eSwapchainKHR : return "SwapchainKHR";
+      case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
+      case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR";
+      case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR";
+      case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT";
+      case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
+      case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
+      case DebugReportObjectTypeEXT::eAccelerationStructureKHR : return "AccelerationStructureKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
+  {
+    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error";
+      default: return "invalid";
+    }
+  }
+
+  enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
+  {
+    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General";
+      case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation";
+      case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance";
+      default: return "invalid";
+    }
+  }
+
+  enum class DependencyFlagBits : VkDependencyFlags
+  {
+    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+    eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+    eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
+    eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
+  {
+    switch ( value )
+    {
+      case DependencyFlagBits::eByRegion : return "ByRegion";
+      case DependencyFlagBits::eDeviceGroup : return "DeviceGroup";
+      case DependencyFlagBits::eViewLocal : return "ViewLocal";
+      default: return "invalid";
+    }
+  }
+
+  enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags
+  {
+    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
+  };
+  using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorBindingFlagBits::eUpdateAfterBind : return "UpdateAfterBind";
+      case DescriptorBindingFlagBits::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending";
+      case DescriptorBindingFlagBits::ePartiallyBound : return "PartiallyBound";
+      case DescriptorBindingFlagBits::eVariableDescriptorCount : return "VariableDescriptorCount";
+      default: return "invalid";
+    }
+  }
+
+  enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
+  {
+    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+    eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+    eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet";
+      case DescriptorPoolCreateFlagBits::eUpdateAfterBind : return "UpdateAfterBind";
+      default: return "invalid";
+    }
+  }
+
+  enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags
+  {
+    eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+    ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+    eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool : return "UpdateAfterBindPool";
+      case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class DescriptorType
+  {
+    eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
+    eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+    eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+    eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
+    eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
+    eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+    eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+    eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+    eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+    eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+    eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+    eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
+    eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
+    eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorType value )
+  {
+    switch ( value )
+    {
+      case DescriptorType::eSampler : return "Sampler";
+      case DescriptorType::eCombinedImageSampler : return "CombinedImageSampler";
+      case DescriptorType::eSampledImage : return "SampledImage";
+      case DescriptorType::eStorageImage : return "StorageImage";
+      case DescriptorType::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case DescriptorType::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case DescriptorType::eUniformBuffer : return "UniformBuffer";
+      case DescriptorType::eStorageBuffer : return "StorageBuffer";
+      case DescriptorType::eUniformBufferDynamic : return "UniformBufferDynamic";
+      case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic";
+      case DescriptorType::eInputAttachment : return "InputAttachment";
+      case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT";
+      case DescriptorType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class DescriptorUpdateTemplateType
+  {
+    eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
+  };
+  using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
+  {
+    switch ( value )
+    {
+      case DescriptorUpdateTemplateType::eDescriptorSet : return "DescriptorSet";
+      case DescriptorUpdateTemplateType::ePushDescriptorsKHR : return "PushDescriptorsKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class DeviceCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV
+  {
+    eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
+    eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
+    eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo : return "EnableShaderDebugInfo";
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking : return "EnableResourceTracking";
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints : return "EnableAutomaticCheckpoints";
+      default: return "invalid";
+    }
+  }
+
+  enum class DeviceEventTypeEXT
+  {
+    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceEventTypeEXT::eDisplayHotplug : return "DisplayHotplug";
+      default: return "invalid";
+    }
+  }
+
+  enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local";
+      case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote";
+      case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum";
+      case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice";
+      default: return "invalid";
+    }
+  }
+
+  enum class DeviceMemoryReportEventTypeEXT
+  {
+    eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT,
+    eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT,
+    eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT,
+    eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT,
+    eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceMemoryReportEventTypeEXT::eAllocate : return "Allocate";
+      case DeviceMemoryReportEventTypeEXT::eFree : return "Free";
+      case DeviceMemoryReportEventTypeEXT::eImport : return "Import";
+      case DeviceMemoryReportEventTypeEXT::eUnimport : return "Unimport";
+      case DeviceMemoryReportEventTypeEXT::eAllocationFailed : return "AllocationFailed";
+      default: return "invalid";
+    }
+  }
+
+  enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
+  {
+    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DeviceQueueCreateFlagBits::eProtected : return "Protected";
+      default: return "invalid";
+    }
+  }
+
+  enum class DiscardRectangleModeEXT
+  {
+    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
+  {
+    switch ( value )
+    {
+      case DiscardRectangleModeEXT::eInclusive : return "Inclusive";
+      case DiscardRectangleModeEXT::eExclusive : return "Exclusive";
+      default: return "invalid";
+    }
+  }
+
+  enum class DisplayEventTypeEXT
+  {
+    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayEventTypeEXT::eFirstPixelOut : return "FirstPixelOut";
+      default: return "invalid";
+    }
+  }
+
+  enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque";
+      case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied";
+      default: return "invalid";
+    }
+  }
+
+  enum class DisplayPowerStateEXT
+  {
+    eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
+    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
+    eOn = VK_DISPLAY_POWER_STATE_ON_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayPowerStateEXT::eOff : return "Off";
+      case DisplayPowerStateEXT::eSuspend : return "Suspend";
+      case DisplayPowerStateEXT::eOn : return "On";
+      default: return "invalid";
+    }
+  }
+
+  enum class DriverId
+  {
+    eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY,
+    eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE,
+    eMesaRadv = VK_DRIVER_ID_MESA_RADV,
+    eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
+    eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
+    eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
+    eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
+    eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
+    eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY,
+    eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
+    eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY,
+    eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+    eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE,
+    eMoltenvk = VK_DRIVER_ID_MOLTENVK,
+    eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR
+  };
+  using DriverIdKHR = DriverId;
+
+  VULKAN_HPP_INLINE std::string to_string( DriverId value )
+  {
+    switch ( value )
+    {
+      case DriverId::eAmdProprietary : return "AmdProprietary";
+      case DriverId::eAmdOpenSource : return "AmdOpenSource";
+      case DriverId::eMesaRadv : return "MesaRadv";
+      case DriverId::eNvidiaProprietary : return "NvidiaProprietary";
+      case DriverId::eIntelProprietaryWindows : return "IntelProprietaryWindows";
+      case DriverId::eIntelOpenSourceMESA : return "IntelOpenSourceMESA";
+      case DriverId::eImaginationProprietary : return "ImaginationProprietary";
+      case DriverId::eQualcommProprietary : return "QualcommProprietary";
+      case DriverId::eArmProprietary : return "ArmProprietary";
+      case DriverId::eGoogleSwiftshader : return "GoogleSwiftshader";
+      case DriverId::eGgpProprietary : return "GgpProprietary";
+      case DriverId::eBroadcomProprietary : return "BroadcomProprietary";
+      case DriverId::eMesaLlvmpipe : return "MesaLlvmpipe";
+      case DriverId::eMoltenvk : return "Moltenvk";
+      default: return "invalid";
+    }
+  }
+
+  enum class DynamicState
+  {
+    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+    eScissor = VK_DYNAMIC_STATE_SCISSOR,
+    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+    eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+    eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
+    eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
+    eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
+    eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
+    eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
+    eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
+    eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
+    eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
+    ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
+    eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT,
+    eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
+    eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
+    eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
+    eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
+    eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
+    eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
+    eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
+    eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DynamicState value )
+  {
+    switch ( value )
+    {
+      case DynamicState::eViewport : return "Viewport";
+      case DynamicState::eScissor : return "Scissor";
+      case DynamicState::eLineWidth : return "LineWidth";
+      case DynamicState::eDepthBias : return "DepthBias";
+      case DynamicState::eBlendConstants : return "BlendConstants";
+      case DynamicState::eDepthBounds : return "DepthBounds";
+      case DynamicState::eStencilCompareMask : return "StencilCompareMask";
+      case DynamicState::eStencilWriteMask : return "StencilWriteMask";
+      case DynamicState::eStencilReference : return "StencilReference";
+      case DynamicState::eViewportWScalingNV : return "ViewportWScalingNV";
+      case DynamicState::eDiscardRectangleEXT : return "DiscardRectangleEXT";
+      case DynamicState::eSampleLocationsEXT : return "SampleLocationsEXT";
+      case DynamicState::eViewportShadingRatePaletteNV : return "ViewportShadingRatePaletteNV";
+      case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV";
+      case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV";
+      case DynamicState::eFragmentShadingRateKHR : return "FragmentShadingRateKHR";
+      case DynamicState::eLineStippleEXT : return "LineStippleEXT";
+      case DynamicState::eCullModeEXT : return "CullModeEXT";
+      case DynamicState::eFrontFaceEXT : return "FrontFaceEXT";
+      case DynamicState::ePrimitiveTopologyEXT : return "PrimitiveTopologyEXT";
+      case DynamicState::eViewportWithCountEXT : return "ViewportWithCountEXT";
+      case DynamicState::eScissorWithCountEXT : return "ScissorWithCountEXT";
+      case DynamicState::eVertexInputBindingStrideEXT : return "VertexInputBindingStrideEXT";
+      case DynamicState::eDepthTestEnableEXT : return "DepthTestEnableEXT";
+      case DynamicState::eDepthWriteEnableEXT : return "DepthWriteEnableEXT";
+      case DynamicState::eDepthCompareOpEXT : return "DepthCompareOpEXT";
+      case DynamicState::eDepthBoundsTestEnableEXT : return "DepthBoundsTestEnableEXT";
+      case DynamicState::eStencilTestEnableEXT : return "StencilTestEnableEXT";
+      case DynamicState::eStencilOpEXT : return "StencilOpEXT";
+      default: return "invalid";
+    }
+  }
+
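+  // Illustrative usage sketch (not generated from the registry): DynamicState values
+  // are typically listed in a vk::PipelineDynamicStateCreateInfo so those pieces of
+  // pipeline state can be set at command-buffer record time instead of bake time, e.g.
+  //
+  //   vk::DynamicState states[] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
+  //   vk::PipelineDynamicStateCreateInfo dynamicState( {}, 2, states );
+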
+  enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalFenceFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
+  };
+  using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd";
+      default: return "invalid";
+    }
+  }
+
+  enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalMemoryFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable";
+      case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+    eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+    eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+    eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+    eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
+    eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT
+  };
+  using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource";
+      case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT";
+      case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID";
+      case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT";
+      case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT";
+      default: return "invalid";
+    }
+  }
+
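+  // Illustrative usage sketch (not generated from the registry; size, memoryTypeIndex
+  // and device are assumed to be valid in the surrounding code): these handle types are
+  // what an application requests when exporting device memory, by chaining a
+  // vk::ExportMemoryAllocateInfo into the allocation, e.g.
+  //
+  //   vk::ExportMemoryAllocateInfo exportInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
+  //   allocInfo.setPNext( &exportInfo );
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
+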
+  enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt";
+      default: return "invalid";
+    }
+  }
+
+  enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+    eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT
+  };
+  using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence";
+      case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd";
+      default: return "invalid";
+    }
+  }
+
+  enum class FenceCreateFlagBits : VkFenceCreateFlags
+  {
+    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceCreateFlagBits::eSignaled : return "Signaled";
+      default: return "invalid";
+    }
+  }
+
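+  // Illustrative usage sketch (not generated from the registry; assumes a valid
+  // vk::Device named device and default exception handling): creating a fence already
+  // signaled avoids blocking on the very first frame of a render loop, e.g.
+  //
+  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
+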
+  enum class FenceImportFlagBits : VkFenceImportFlags
+  {
+    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
+  };
+  using FenceImportFlagBitsKHR = FenceImportFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceImportFlagBits::eTemporary : return "Temporary";
+      default: return "invalid";
+    }
+  }
+
+  enum class Filter
+  {
+    eNearest = VK_FILTER_NEAREST,
+    eLinear = VK_FILTER_LINEAR,
+    eCubicIMG = VK_FILTER_CUBIC_IMG,
+    eCubicEXT = VK_FILTER_CUBIC_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( Filter value )
+  {
+    switch ( value )
+    {
+      case Filter::eNearest : return "Nearest";
+      case Filter::eLinear : return "Linear";
+      case Filter::eCubicIMG : return "CubicIMG";
+      default: return "invalid";
+    }
+  }
 
   enum class Format
   {
@@ -8982,73 +5612,39 @@
     eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
     eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
     eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
-    eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM,
     eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
-    eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM,
     eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
-    eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
     eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
-    eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
     eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
-    eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
     eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
-    eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
     eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
-    eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
     eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
-    eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16,
     eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
-    eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
     eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
-    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
     eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
-    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
     eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
-    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
     eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
-    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
     eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
-    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
     eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
-    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
     eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
-    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
     eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
-    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
     eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
-    eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16,
     eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
-    eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
     eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
-    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
     eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
-    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
     eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
-    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
     eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
-    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
     eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
-    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
     eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
-    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
     eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
-    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
     eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
-    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
     eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
-    eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM,
     eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
-    eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM,
     eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
-    eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
     eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
-    eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
     eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
-    eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
     eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
-    eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
     eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
-    eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
     ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
     ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
     ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
@@ -9056,85 +5652,2433 @@
     ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
     ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
     ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
-    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG
+    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+    eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
+    eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
+    eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
+    eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
+    eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
+    eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
+    eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
+    eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
+    eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
+    eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
+    eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
+    eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
+    eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
+    eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
+    eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
+    eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
+    eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
+    eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
+    eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
+    eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
+    eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
+    eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
+    eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
+    eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
+    eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+    eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+    eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
+    eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
+    eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
+    eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
+    eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
+    eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
+    eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
+    eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
   };
 
-  struct VertexInputAttributeDescription
+  VULKAN_HPP_INLINE std::string to_string( Format value )
   {
-    VertexInputAttributeDescription( uint32_t location_ = 0,
-                                     uint32_t binding_ = 0,
-                                     Format format_ = Format::eUndefined,
-                                     uint32_t offset_ = 0 )
-      : location( location_ )
-      , binding( binding_ )
-      , format( format_ )
-      , offset( offset_ )
+    switch ( value )
     {
+      case Format::eUndefined : return "Undefined";
+      case Format::eR4G4UnormPack8 : return "R4G4UnormPack8";
+      case Format::eR4G4B4A4UnormPack16 : return "R4G4B4A4UnormPack16";
+      case Format::eB4G4R4A4UnormPack16 : return "B4G4R4A4UnormPack16";
+      case Format::eR5G6B5UnormPack16 : return "R5G6B5UnormPack16";
+      case Format::eB5G6R5UnormPack16 : return "B5G6R5UnormPack16";
+      case Format::eR5G5B5A1UnormPack16 : return "R5G5B5A1UnormPack16";
+      case Format::eB5G5R5A1UnormPack16 : return "B5G5R5A1UnormPack16";
+      case Format::eA1R5G5B5UnormPack16 : return "A1R5G5B5UnormPack16";
+      case Format::eR8Unorm : return "R8Unorm";
+      case Format::eR8Snorm : return "R8Snorm";
+      case Format::eR8Uscaled : return "R8Uscaled";
+      case Format::eR8Sscaled : return "R8Sscaled";
+      case Format::eR8Uint : return "R8Uint";
+      case Format::eR8Sint : return "R8Sint";
+      case Format::eR8Srgb : return "R8Srgb";
+      case Format::eR8G8Unorm : return "R8G8Unorm";
+      case Format::eR8G8Snorm : return "R8G8Snorm";
+      case Format::eR8G8Uscaled : return "R8G8Uscaled";
+      case Format::eR8G8Sscaled : return "R8G8Sscaled";
+      case Format::eR8G8Uint : return "R8G8Uint";
+      case Format::eR8G8Sint : return "R8G8Sint";
+      case Format::eR8G8Srgb : return "R8G8Srgb";
+      case Format::eR8G8B8Unorm : return "R8G8B8Unorm";
+      case Format::eR8G8B8Snorm : return "R8G8B8Snorm";
+      case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled";
+      case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled";
+      case Format::eR8G8B8Uint : return "R8G8B8Uint";
+      case Format::eR8G8B8Sint : return "R8G8B8Sint";
+      case Format::eR8G8B8Srgb : return "R8G8B8Srgb";
+      case Format::eB8G8R8Unorm : return "B8G8R8Unorm";
+      case Format::eB8G8R8Snorm : return "B8G8R8Snorm";
+      case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled";
+      case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled";
+      case Format::eB8G8R8Uint : return "B8G8R8Uint";
+      case Format::eB8G8R8Sint : return "B8G8R8Sint";
+      case Format::eB8G8R8Srgb : return "B8G8R8Srgb";
+      case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm";
+      case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm";
+      case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled";
+      case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled";
+      case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint";
+      case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint";
+      case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb";
+      case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm";
+      case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm";
+      case Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled";
+      case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled";
+      case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint";
+      case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint";
+      case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb";
+      case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32";
+      case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32";
+      case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32";
+      case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32";
+      case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32";
+      case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32";
+      case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32";
+      case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32";
+      case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32";
+      case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32";
+      case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32";
+      case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32";
+      case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32";
+      case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32";
+      case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32";
+      case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32";
+      case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32";
+      case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32";
+      case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32";
+      case Format::eR16Unorm : return "R16Unorm";
+      case Format::eR16Snorm : return "R16Snorm";
+      case Format::eR16Uscaled : return "R16Uscaled";
+      case Format::eR16Sscaled : return "R16Sscaled";
+      case Format::eR16Uint : return "R16Uint";
+      case Format::eR16Sint : return "R16Sint";
+      case Format::eR16Sfloat : return "R16Sfloat";
+      case Format::eR16G16Unorm : return "R16G16Unorm";
+      case Format::eR16G16Snorm : return "R16G16Snorm";
+      case Format::eR16G16Uscaled : return "R16G16Uscaled";
+      case Format::eR16G16Sscaled : return "R16G16Sscaled";
+      case Format::eR16G16Uint : return "R16G16Uint";
+      case Format::eR16G16Sint : return "R16G16Sint";
+      case Format::eR16G16Sfloat : return "R16G16Sfloat";
+      case Format::eR16G16B16Unorm : return "R16G16B16Unorm";
+      case Format::eR16G16B16Snorm : return "R16G16B16Snorm";
+      case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled";
+      case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled";
+      case Format::eR16G16B16Uint : return "R16G16B16Uint";
+      case Format::eR16G16B16Sint : return "R16G16B16Sint";
+      case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat";
+      case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm";
+      case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm";
+      case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled";
+      case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled";
+      case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint";
+      case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint";
+      case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat";
+      case Format::eR32Uint : return "R32Uint";
+      case Format::eR32Sint : return "R32Sint";
+      case Format::eR32Sfloat : return "R32Sfloat";
+      case Format::eR32G32Uint : return "R32G32Uint";
+      case Format::eR32G32Sint : return "R32G32Sint";
+      case Format::eR32G32Sfloat : return "R32G32Sfloat";
+      case Format::eR32G32B32Uint : return "R32G32B32Uint";
+      case Format::eR32G32B32Sint : return "R32G32B32Sint";
+      case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat";
+      case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint";
+      case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint";
+      case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat";
+      case Format::eR64Uint : return "R64Uint";
+      case Format::eR64Sint : return "R64Sint";
+      case Format::eR64Sfloat : return "R64Sfloat";
+      case Format::eR64G64Uint : return "R64G64Uint";
+      case Format::eR64G64Sint : return "R64G64Sint";
+      case Format::eR64G64Sfloat : return "R64G64Sfloat";
+      case Format::eR64G64B64Uint : return "R64G64B64Uint";
+      case Format::eR64G64B64Sint : return "R64G64B64Sint";
+      case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat";
+      case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint";
+      case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint";
+      case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat";
+      case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32";
+      case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32";
+      case Format::eD16Unorm : return "D16Unorm";
+      case Format::eX8D24UnormPack32 : return "X8D24UnormPack32";
+      case Format::eD32Sfloat : return "D32Sfloat";
+      case Format::eS8Uint : return "S8Uint";
+      case Format::eD16UnormS8Uint : return "D16UnormS8Uint";
+      case Format::eD24UnormS8Uint : return "D24UnormS8Uint";
+      case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint";
+      case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock";
+      case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock";
+      case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock";
+      case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock";
+      case Format::eBc2UnormBlock : return "Bc2UnormBlock";
+      case Format::eBc2SrgbBlock : return "Bc2SrgbBlock";
+      case Format::eBc3UnormBlock : return "Bc3UnormBlock";
+      case Format::eBc3SrgbBlock : return "Bc3SrgbBlock";
+      case Format::eBc4UnormBlock : return "Bc4UnormBlock";
+      case Format::eBc4SnormBlock : return "Bc4SnormBlock";
+      case Format::eBc5UnormBlock : return "Bc5UnormBlock";
+      case Format::eBc5SnormBlock : return "Bc5SnormBlock";
+      case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock";
+      case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock";
+      case Format::eBc7UnormBlock : return "Bc7UnormBlock";
+      case Format::eBc7SrgbBlock : return "Bc7SrgbBlock";
+      case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock";
+      case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock";
+      case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock";
+      case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock";
+      case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock";
+      case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock";
+      case Format::eEacR11UnormBlock : return "EacR11UnormBlock";
+      case Format::eEacR11SnormBlock : return "EacR11SnormBlock";
+      case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock";
+      case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock";
+      case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock";
+      case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock";
+      case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock";
+      case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock";
+      case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock";
+      case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock";
+      case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock";
+      case Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock";
+      case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock";
+      case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock";
+      case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock";
+      case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock";
+      case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock";
+      case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock";
+      case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock";
+      case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock";
+      case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock";
+      case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock";
+      case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock";
+      case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock";
+      case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock";
+      case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock";
+      case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock";
+      case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock";
+      case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock";
+      case Format::eAstc12x10SrgbBlock : return "Astc12x10SrgbBlock";
+      case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock";
+      case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock";
+      case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm";
+      case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm";
+      case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm";
+      case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm";
+      case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm";
+      case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm";
+      case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm";
+      case Format::eR10X6UnormPack16 : return "R10X6UnormPack16";
+      case Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16";
+      case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16";
+      case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
+      case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
+      case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16";
+      case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16";
+      case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16";
+      case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16";
+      case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16";
+      case Format::eR12X4UnormPack16 : return "R12X4UnormPack16";
+      case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16";
+      case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16";
+      case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
+      case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
+      case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16";
+      case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16";
+      case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16";
+      case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 : return "G12X4B12X4R12X42Plane422Unorm3Pack16";
+      case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16";
+      case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm";
+      case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm";
+      case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm";
+      case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm";
+      case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm";
+      case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm";
+      case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm";
+      case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG";
+      case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG";
+      case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG";
+      case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG";
+      case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG";
+      case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG";
+      case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG";
+      case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG";
+      case Format::eAstc4x4SfloatBlockEXT : return "Astc4x4SfloatBlockEXT";
+      case Format::eAstc5x4SfloatBlockEXT : return "Astc5x4SfloatBlockEXT";
+      case Format::eAstc5x5SfloatBlockEXT : return "Astc5x5SfloatBlockEXT";
+      case Format::eAstc6x5SfloatBlockEXT : return "Astc6x5SfloatBlockEXT";
+      case Format::eAstc6x6SfloatBlockEXT : return "Astc6x6SfloatBlockEXT";
+      case Format::eAstc8x5SfloatBlockEXT : return "Astc8x5SfloatBlockEXT";
+      case Format::eAstc8x6SfloatBlockEXT : return "Astc8x6SfloatBlockEXT";
+      case Format::eAstc8x8SfloatBlockEXT : return "Astc8x8SfloatBlockEXT";
+      case Format::eAstc10x5SfloatBlockEXT : return "Astc10x5SfloatBlockEXT";
+      case Format::eAstc10x6SfloatBlockEXT : return "Astc10x6SfloatBlockEXT";
+      case Format::eAstc10x8SfloatBlockEXT : return "Astc10x8SfloatBlockEXT";
+      case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT";
+      case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT";
+      case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT";
+      case Format::eA4R4G4B4UnormPack16EXT : return "A4R4G4B4UnormPack16EXT";
+      case Format::eA4B4G4R4UnormPack16EXT : return "A4B4G4R4UnormPack16EXT";
+      default: return "invalid";
     }
+  }
 
-    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) );
-    }
-
-    VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) );
-      return *this;
-    }
-    VertexInputAttributeDescription& setLocation( uint32_t location_ )
-    {
-      location = location_;
-      return *this;
-    }
-
-    VertexInputAttributeDescription& setBinding( uint32_t binding_ )
-    {
-      binding = binding_;
-      return *this;
-    }
-
-    VertexInputAttributeDescription& setFormat( Format format_ )
-    {
-      format = format_;
-      return *this;
-    }
-
-    VertexInputAttributeDescription& setOffset( uint32_t offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    operator VkVertexInputAttributeDescription const&() const
-    {
-      return *reinterpret_cast<const VkVertexInputAttributeDescription*>(this);
-    }
-
-    operator VkVertexInputAttributeDescription &()
-    {
-      return *reinterpret_cast<VkVertexInputAttributeDescription*>(this);
-    }
-
-    bool operator==( VertexInputAttributeDescription const& rhs ) const
-    {
-      return ( location == rhs.location )
-          && ( binding == rhs.binding )
-          && ( format == rhs.format )
-          && ( offset == rhs.offset );
-    }
-
-    bool operator!=( VertexInputAttributeDescription const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t location;
-    uint32_t binding;
-    Format format;
-    uint32_t offset;
+  enum class FormatFeatureFlagBits : VkFormatFeatureFlags
+  {
+    eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
+    eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
   };
-  static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case FormatFeatureFlagBits::eSampledImage : return "SampledImage";
+      case FormatFeatureFlagBits::eStorageImage : return "StorageImage";
+      case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic";
+      case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
+      case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer";
+      case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment";
+      case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend";
+      case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+      case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc";
+      case FormatFeatureFlagBits::eBlitDst : return "BlitDst";
+      case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear";
+      case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc";
+      case FormatFeatureFlagBits::eTransferDst : return "TransferDst";
+      case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+      case FormatFeatureFlagBits::eDisjoint : return "Disjoint";
+      case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax";
+      case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG";
+      case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR";
+      case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+      case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR : return "FragmentShadingRateAttachmentKHR";
+      default: return "invalid";
+    }
+  }
+
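+  // Illustrative usage sketch (not generated from the registry; assumes a valid
+  // vk::PhysicalDevice named physicalDevice): these bits are usually checked before
+  // creating an image, to confirm the format supports the intended usage, e.g.
+  //
+  //   vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eD32Sfloat );
+  //   if ( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment )
+  //   {
+  //     // eD32Sfloat is usable as a depth attachment with optimal tiling
+  //   }
+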
+  enum class FragmentShadingRateCombinerOpKHR
+  {
+    eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+    eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR,
+    eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR,
+    eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR,
+    eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value )
+  {
+    switch ( value )
+    {
+      case FragmentShadingRateCombinerOpKHR::eKeep : return "Keep";
+      case FragmentShadingRateCombinerOpKHR::eReplace : return "Replace";
+      case FragmentShadingRateCombinerOpKHR::eMin : return "Min";
+      case FragmentShadingRateCombinerOpKHR::eMax : return "Max";
+      case FragmentShadingRateCombinerOpKHR::eMul : return "Mul";
+      default: return "invalid";
+    }
+  }
+
+  enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
+  {
+    eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
+    eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case FramebufferCreateFlagBits::eImageless : return "Imageless";
+      default: return "invalid";
+    }
+  }
+
+  enum class FrontFace
+  {
+    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    eClockwise = VK_FRONT_FACE_CLOCKWISE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FrontFace value )
+  {
+    switch ( value )
+    {
+      case FrontFace::eCounterClockwise : return "CounterClockwise";
+      case FrontFace::eClockwise : return "Clockwise";
+      default: return "invalid";
+    }
+  }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  enum class FullScreenExclusiveEXT
+  {
+    eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
+    eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
+    eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
+    eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
+  {
+    switch ( value )
+    {
+      case FullScreenExclusiveEXT::eDefault : return "Default";
+      case FullScreenExclusiveEXT::eAllowed : return "Allowed";
+      case FullScreenExclusiveEXT::eDisallowed : return "Disallowed";
+      case FullScreenExclusiveEXT::eApplicationControlled : return "ApplicationControlled";
+      default: return "invalid";
+    }
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
+  {
+    eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
+    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR
+  };
+  using GeometryFlagBitsNV = GeometryFlagBitsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryFlagBitsKHR::eOpaque : return "Opaque";
+      case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
+      default: return "invalid";
+    }
+  }
+
+  enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
+  {
+    eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+    eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+    eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+    eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV
+  };
+  using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable";
+      case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
+      case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque";
+      case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque";
+      default: return "invalid";
+    }
+  }
+
+  enum class GeometryTypeKHR
+  {
+    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+    eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
+    eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
+  };
+  using GeometryTypeNV = GeometryTypeKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryTypeKHR::eTriangles : return "Triangles";
+      case GeometryTypeKHR::eAabbs : return "Aabbs";
+      case GeometryTypeKHR::eInstances : return "Instances";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageAspectFlagBits : VkImageAspectFlags
+  {
+    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+    ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
+    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
+    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
+    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
+    ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
+    ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
+    ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageAspectFlagBits::eColor : return "Color";
+      case ImageAspectFlagBits::eDepth : return "Depth";
+      case ImageAspectFlagBits::eStencil : return "Stencil";
+      case ImageAspectFlagBits::eMetadata : return "Metadata";
+      case ImageAspectFlagBits::ePlane0 : return "Plane0";
+      case ImageAspectFlagBits::ePlane1 : return "Plane1";
+      case ImageAspectFlagBits::ePlane2 : return "Plane2";
+      case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT";
+      case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT";
+      case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT";
+      case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageCreateFlagBits : VkImageCreateFlags
+  {
+    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+    eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
+    eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
+    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageCreateFlagBits::eSparseBinding : return "SparseBinding";
+      case ImageCreateFlagBits::eSparseResidency : return "SparseResidency";
+      case ImageCreateFlagBits::eSparseAliased : return "SparseAliased";
+      case ImageCreateFlagBits::eMutableFormat : return "MutableFormat";
+      case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible";
+      case ImageCreateFlagBits::eAlias : return "Alias";
+      case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+      case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible";
+      case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible";
+      case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage";
+      case ImageCreateFlagBits::eProtected : return "Protected";
+      case ImageCreateFlagBits::eDisjoint : return "Disjoint";
+      case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV";
+      case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT";
+      case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageLayout
+  {
+    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
+    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
+    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+    eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+    eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+    eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
+    eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
+    eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+    eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
+    eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
+    eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
+  {
+    switch ( value )
+    {
+      case ImageLayout::eUndefined : return "Undefined";
+      case ImageLayout::eGeneral : return "General";
+      case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal";
+      case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal";
+      case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal";
+      case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal";
+      case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal";
+      case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal";
+      case ImageLayout::ePreinitialized : return "Preinitialized";
+      case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal";
+      case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal";
+      case ImageLayout::eDepthAttachmentOptimal : return "DepthAttachmentOptimal";
+      case ImageLayout::eDepthReadOnlyOptimal : return "DepthReadOnlyOptimal";
+      case ImageLayout::eStencilAttachmentOptimal : return "StencilAttachmentOptimal";
+      case ImageLayout::eStencilReadOnlyOptimal : return "StencilReadOnlyOptimal";
+      case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR";
+      case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
+      case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV";
+      case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
+      default: return "invalid";
+    }
+  }
+
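+  // Note, not part of the generated header: duplicated enumerators such as
+  // eDepthAttachmentOptimalKHR alias their core counterparts (here
+  // eDepthAttachmentOptimal), so they carry the same numeric value and
+  // intentionally have no separate case label in the switch above.
+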
+  enum class ImageTiling
+  {
+    eOptimal = VK_IMAGE_TILING_OPTIMAL,
+    eLinear = VK_IMAGE_TILING_LINEAR,
+    eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
+  {
+    switch ( value )
+    {
+      case ImageTiling::eOptimal : return "Optimal";
+      case ImageTiling::eLinear : return "Linear";
+      case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageType
+  {
+    e1D = VK_IMAGE_TYPE_1D,
+    e2D = VK_IMAGE_TYPE_2D,
+    e3D = VK_IMAGE_TYPE_3D
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageType value )
+  {
+    switch ( value )
+    {
+      case ImageType::e1D : return "1D";
+      case ImageType::e2D : return "2D";
+      case ImageType::e3D : return "3D";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageUsageFlagBits : VkImageUsageFlags
+  {
+    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageUsageFlagBits::eTransferSrc : return "TransferSrc";
+      case ImageUsageFlagBits::eTransferDst : return "TransferDst";
+      case ImageUsageFlagBits::eSampled : return "Sampled";
+      case ImageUsageFlagBits::eStorage : return "Storage";
+      case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment";
+      case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+      case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment";
+      case ImageUsageFlagBits::eInputAttachment : return "InputAttachment";
+      case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
+      case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+      default: return "invalid";
+    }
+  }
+
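+  // Illustrative usage, not part of the generated header: each FlagBits
+  // enumeration names single bits; the matching Flags wrapper types and
+  // bitwise operators defined elsewhere in this header combine them. A
+  // minimal sketch, assuming the conventional "vk" namespace:
+  //
+  //   vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eColorAttachment
+  //                             | vk::ImageUsageFlagBits::eTransferSrc;
+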
+  enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
+  {
+    eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
+    eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
+      case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT : return "FragmentDensityMapDeferredEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageViewType
+  {
+    e1D = VK_IMAGE_VIEW_TYPE_1D,
+    e2D = VK_IMAGE_VIEW_TYPE_2D,
+    e3D = VK_IMAGE_VIEW_TYPE_3D,
+    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
+    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
+  {
+    switch ( value )
+    {
+      case ImageViewType::e1D : return "1D";
+      case ImageViewType::e2D : return "2D";
+      case ImageViewType::e3D : return "3D";
+      case ImageViewType::eCube : return "Cube";
+      case ImageViewType::e1DArray : return "1DArray";
+      case ImageViewType::e2DArray : return "2DArray";
+      case ImageViewType::eCubeArray : return "CubeArray";
+      default: return "invalid";
+    }
+  }
+
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32,
+    eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
+    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
+    eNoneNV = VK_INDEX_TYPE_NONE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndexType value )
+  {
+    switch ( value )
+    {
+      case IndexType::eUint16 : return "Uint16";
+      case IndexType::eUint32 : return "Uint32";
+      case IndexType::eNoneKHR : return "NoneKHR";
+      case IndexType::eUint8EXT : return "Uint8EXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
+  {
+    eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
+    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess";
+      case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences";
+      case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences";
+      default: return "invalid";
+    }
+  }
+
+  enum class IndirectCommandsTokenTypeNV
+  {
+    eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
+    eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
+    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
+    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
+    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
+    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
+    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
+    eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup";
+      case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags";
+      case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer";
+      case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer";
+      case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant";
+      case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed";
+      case IndirectCommandsTokenTypeNV::eDraw : return "Draw";
+      case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks";
+      default: return "invalid";
+    }
+  }
+
+  enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
+  {
+    eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface";
+      default: return "invalid";
+    }
+  }
+
+  enum class InstanceCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class InternalAllocationType
+  {
+    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
+  {
+    switch ( value )
+    {
+      case InternalAllocationType::eExecutable : return "Executable";
+      default: return "invalid";
+    }
+  }
+
+  enum class LineRasterizationModeEXT
+  {
+    eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
+    eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
+    eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
+    eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case LineRasterizationModeEXT::eDefault : return "Default";
+      case LineRasterizationModeEXT::eRectangular : return "Rectangular";
+      case LineRasterizationModeEXT::eBresenham : return "Bresenham";
+      case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth";
+      default: return "invalid";
+    }
+  }
+
+  enum class LogicOp
+  {
+    eClear = VK_LOGIC_OP_CLEAR,
+    eAnd = VK_LOGIC_OP_AND,
+    eAndReverse = VK_LOGIC_OP_AND_REVERSE,
+    eCopy = VK_LOGIC_OP_COPY,
+    eAndInverted = VK_LOGIC_OP_AND_INVERTED,
+    eNoOp = VK_LOGIC_OP_NO_OP,
+    eXor = VK_LOGIC_OP_XOR,
+    eOr = VK_LOGIC_OP_OR,
+    eNor = VK_LOGIC_OP_NOR,
+    eEquivalent = VK_LOGIC_OP_EQUIVALENT,
+    eInvert = VK_LOGIC_OP_INVERT,
+    eOrReverse = VK_LOGIC_OP_OR_REVERSE,
+    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
+    eOrInverted = VK_LOGIC_OP_OR_INVERTED,
+    eNand = VK_LOGIC_OP_NAND,
+    eSet = VK_LOGIC_OP_SET
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( LogicOp value )
+  {
+    switch ( value )
+    {
+      case LogicOp::eClear : return "Clear";
+      case LogicOp::eAnd : return "And";
+      case LogicOp::eAndReverse : return "AndReverse";
+      case LogicOp::eCopy : return "Copy";
+      case LogicOp::eAndInverted : return "AndInverted";
+      case LogicOp::eNoOp : return "NoOp";
+      case LogicOp::eXor : return "Xor";
+      case LogicOp::eOr : return "Or";
+      case LogicOp::eNor : return "Nor";
+      case LogicOp::eEquivalent : return "Equivalent";
+      case LogicOp::eInvert : return "Invert";
+      case LogicOp::eOrReverse : return "OrReverse";
+      case LogicOp::eCopyInverted : return "CopyInverted";
+      case LogicOp::eOrInverted : return "OrInverted";
+      case LogicOp::eNand : return "Nand";
+      case LogicOp::eSet : return "Set";
+      default: return "invalid";
+    }
+  }
+
+  enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
+  {
+    eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+    eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
+    eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT
+  };
+  using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask";
+      case MemoryAllocateFlagBits::eDeviceAddress : return "DeviceAddress";
+      case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+      default: return "invalid";
+    }
+  }
+
+  enum class MemoryHeapFlagBits : VkMemoryHeapFlags
+  {
+    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal";
+      case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance";
+      default: return "invalid";
+    }
+  }
+
+  enum class MemoryOverallocationBehaviorAMD
+  {
+    eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
+    eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
+    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
+  {
+    switch ( value )
+    {
+      case MemoryOverallocationBehaviorAMD::eDefault : return "Default";
+      case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed";
+      case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed";
+      default: return "invalid";
+    }
+  }
+
+  enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
+  {
+    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
+    eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
+    eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal";
+      case MemoryPropertyFlagBits::eHostVisible : return "HostVisible";
+      case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent";
+      case MemoryPropertyFlagBits::eHostCached : return "HostCached";
+      case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated";
+      case MemoryPropertyFlagBits::eProtected : return "Protected";
+      case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD";
+      case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD";
+      default: return "invalid";
+    }
+  }
+
+  enum class ObjectType
+  {
+    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
+    eInstance = VK_OBJECT_TYPE_INSTANCE,
+    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
+    eDevice = VK_OBJECT_TYPE_DEVICE,
+    eQueue = VK_OBJECT_TYPE_QUEUE,
+    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
+    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
+    eFence = VK_OBJECT_TYPE_FENCE,
+    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
+    eBuffer = VK_OBJECT_TYPE_BUFFER,
+    eImage = VK_OBJECT_TYPE_IMAGE,
+    eEvent = VK_OBJECT_TYPE_EVENT,
+    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
+    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
+    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
+    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
+    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
+    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
+    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
+    ePipeline = VK_OBJECT_TYPE_PIPELINE,
+    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
+    eSampler = VK_OBJECT_TYPE_SAMPLER,
+    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
+    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
+    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
+    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
+    eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
+    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
+    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
+    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
+    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
+    eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
+    eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
+    eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+    ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
+    eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
+    eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
+    ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
+    eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
+    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
+    eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ObjectType value )
+  {
+    switch ( value )
+    {
+      case ObjectType::eUnknown : return "Unknown";
+      case ObjectType::eInstance : return "Instance";
+      case ObjectType::ePhysicalDevice : return "PhysicalDevice";
+      case ObjectType::eDevice : return "Device";
+      case ObjectType::eQueue : return "Queue";
+      case ObjectType::eSemaphore : return "Semaphore";
+      case ObjectType::eCommandBuffer : return "CommandBuffer";
+      case ObjectType::eFence : return "Fence";
+      case ObjectType::eDeviceMemory : return "DeviceMemory";
+      case ObjectType::eBuffer : return "Buffer";
+      case ObjectType::eImage : return "Image";
+      case ObjectType::eEvent : return "Event";
+      case ObjectType::eQueryPool : return "QueryPool";
+      case ObjectType::eBufferView : return "BufferView";
+      case ObjectType::eImageView : return "ImageView";
+      case ObjectType::eShaderModule : return "ShaderModule";
+      case ObjectType::ePipelineCache : return "PipelineCache";
+      case ObjectType::ePipelineLayout : return "PipelineLayout";
+      case ObjectType::eRenderPass : return "RenderPass";
+      case ObjectType::ePipeline : return "Pipeline";
+      case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout";
+      case ObjectType::eSampler : return "Sampler";
+      case ObjectType::eDescriptorPool : return "DescriptorPool";
+      case ObjectType::eDescriptorSet : return "DescriptorSet";
+      case ObjectType::eFramebuffer : return "Framebuffer";
+      case ObjectType::eCommandPool : return "CommandPool";
+      case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
+      case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
+      case ObjectType::eSurfaceKHR : return "SurfaceKHR";
+      case ObjectType::eSwapchainKHR : return "SwapchainKHR";
+      case ObjectType::eDisplayKHR : return "DisplayKHR";
+      case ObjectType::eDisplayModeKHR : return "DisplayModeKHR";
+      case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
+      case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT";
+      case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
+      case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT";
+      case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL";
+      case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR";
+      case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV";
+      case ObjectType::ePrivateDataSlotEXT : return "PrivateDataSlotEXT";
+      default: return "invalid";
+    }
+  }
+
+  template<ObjectType value>
+  struct cpp_type
+  {};
+
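+  // Note, not part of the generated header: cpp_type is a traits template
+  // that later parts of this header specialize per handle, mapping an
+  // ObjectType value to the corresponding C++ handle type. A minimal sketch
+  // of the pattern, assuming such a specialization:
+  //
+  //   template <> struct cpp_type<ObjectType::eInstance>
+  //   { using Type = Instance; };
+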
+  enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
+  {
+    eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+    eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
+  };
+  using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc";
+      case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst";
+      case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc";
+      case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceConfigurationTypeINTEL
+  {
+    eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
+  {
+    ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR,
+    eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting : return "PerformanceImpacting";
+      case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted : return "ConcurrentlyImpacted";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceCounterScopeKHR
+  {
+    eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+    eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+    eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+    eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
+    eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR,
+    eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterScopeKHR::eCommandBuffer : return "CommandBuffer";
+      case PerformanceCounterScopeKHR::eRenderPass : return "RenderPass";
+      case PerformanceCounterScopeKHR::eCommand : return "Command";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceCounterStorageKHR
+  {
+    eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+    eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
+    eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
+    eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
+    eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
+    eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterStorageKHR::eInt32 : return "Int32";
+      case PerformanceCounterStorageKHR::eInt64 : return "Int64";
+      case PerformanceCounterStorageKHR::eUint32 : return "Uint32";
+      case PerformanceCounterStorageKHR::eUint64 : return "Uint64";
+      case PerformanceCounterStorageKHR::eFloat32 : return "Float32";
+      case PerformanceCounterStorageKHR::eFloat64 : return "Float64";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceCounterUnitKHR
+  {
+    eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+    ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
+    eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
+    eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
+    eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
+    eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
+    eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
+    eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
+    eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
+    eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
+    eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterUnitKHR::eGeneric : return "Generic";
+      case PerformanceCounterUnitKHR::ePercentage : return "Percentage";
+      case PerformanceCounterUnitKHR::eNanoseconds : return "Nanoseconds";
+      case PerformanceCounterUnitKHR::eBytes : return "Bytes";
+      case PerformanceCounterUnitKHR::eBytesPerSecond : return "BytesPerSecond";
+      case PerformanceCounterUnitKHR::eKelvin : return "Kelvin";
+      case PerformanceCounterUnitKHR::eWatts : return "Watts";
+      case PerformanceCounterUnitKHR::eVolts : return "Volts";
+      case PerformanceCounterUnitKHR::eAmps : return "Amps";
+      case PerformanceCounterUnitKHR::eHertz : return "Hertz";
+      case PerformanceCounterUnitKHR::eCycles : return "Cycles";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceOverrideTypeINTEL
+  {
+    eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
+    eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware";
+      case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceParameterTypeINTEL
+  {
+    eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
+    eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported";
+      case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceValueTypeINTEL
+  {
+    eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
+    eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
+    eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
+    eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
+    eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceValueTypeINTEL::eUint32 : return "Uint32";
+      case PerformanceValueTypeINTEL::eUint64 : return "Uint64";
+      case PerformanceValueTypeINTEL::eFloat : return "Float";
+      case PerformanceValueTypeINTEL::eBool : return "Bool";
+      case PerformanceValueTypeINTEL::eString : return "String";
+      default: return "invalid";
+    }
+  }
+
+  enum class PhysicalDeviceType
+  {
+    eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
+    eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+    eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+    eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
+  {
+    switch ( value )
+    {
+      case PhysicalDeviceType::eOther : return "Other";
+      case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu";
+      case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu";
+      case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu";
+      case PhysicalDeviceType::eCpu : return "Cpu";
+      default: return "invalid";
+    }
+  }
+
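+  // Illustrative usage, not part of the generated header: a minimal sketch,
+  // assuming a vk::PhysicalDeviceProperties value "props" obtained elsewhere
+  // (e.g. via PhysicalDevice::getProperties):
+  //
+  //   std::cout << vk::to_string( props.deviceType )
+  //             << std::endl;   // e.g. prints "DiscreteGpu"
+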
+  enum class PipelineBindPoint
+  {
+    eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+    eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
+    eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
+  {
+    switch ( value )
+    {
+      case PipelineBindPoint::eGraphics : return "Graphics";
+      case PipelineBindPoint::eCompute : return "Compute";
+      case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
+  {
+    eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT : return "ExternallySynchronizedEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineCacheHeaderVersion
+  {
+    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
+  {
+    switch ( value )
+    {
+      case PipelineCacheHeaderVersion::eOne : return "One";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  enum class PipelineCreateFlagBits : VkPipelineCreateFlags
+  {
+    eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+    eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+    eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+    eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+    eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+    eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
+    eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
+    eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
+    eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
+    eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
+    eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
+    eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+    eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
+    eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
+    eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
+    eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
+    eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
+    eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization";
+      case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives";
+      case PipelineCreateFlagBits::eDerivative : return "Derivative";
+      case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex";
+      case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase";
+      case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR : return "RayTracingNoNullAnyHitShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR : return "RayTracingNoNullClosestHitShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR : return "RayTracingNoNullMissShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR";
+      case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR";
+      case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR";
+      case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV";
+      case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR";
+      case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR";
+      case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV";
+      case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR";
+      case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT : return "FailOnPipelineCompileRequiredEXT";
+      case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT : return "EarlyReturnOnFailureEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT
+  {
+    eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
+    eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
+    eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid";
+      case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit";
+      case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineExecutableStatisticFormatKHR
+  {
+    eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
+    eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
+    eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
+    eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
+  {
+    switch ( value )
+    {
+      case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32";
+      case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64";
+      case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64";
+      case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags
+  {
+    eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
+    eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT";
+      case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineStageFlagBits : VkPipelineStageFlags
+  {
+    eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+    eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+    eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe";
+      case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect";
+      case PipelineStageFlagBits::eVertexInput : return "VertexInput";
+      case PipelineStageFlagBits::eVertexShader : return "VertexShader";
+      case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader";
+      case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader";
+      case PipelineStageFlagBits::eGeometryShader : return "GeometryShader";
+      case PipelineStageFlagBits::eFragmentShader : return "FragmentShader";
+      case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests";
+      case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests";
+      case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput";
+      case PipelineStageFlagBits::eComputeShader : return "ComputeShader";
+      case PipelineStageFlagBits::eTransfer : return "Transfer";
+      case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe";
+      case PipelineStageFlagBits::eHost : return "Host";
+      case PipelineStageFlagBits::eAllGraphics : return "AllGraphics";
+      case PipelineStageFlagBits::eAllCommands : return "AllCommands";
+      case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
+      case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+      case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR";
+      case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR";
+      case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
+      case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV";
+      case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
+      case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
+      case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV";
+      default: return "invalid";
+    }
+  }
+
+  enum class PointClippingBehavior
+  {
+    eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
+  };
+  using PointClippingBehaviorKHR = PointClippingBehavior;
+
+  VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
+  {
+    switch ( value )
+    {
+      case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes";
+      case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly";
+      default: return "invalid";
+    }
+  }
+
+  enum class PolygonMode
+  {
+    eFill = VK_POLYGON_MODE_FILL,
+    eLine = VK_POLYGON_MODE_LINE,
+    ePoint = VK_POLYGON_MODE_POINT,
+    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
+  {
+    switch ( value )
+    {
+      case PolygonMode::eFill : return "Fill";
+      case PolygonMode::eLine : return "Line";
+      case PolygonMode::ePoint : return "Point";
+      case PolygonMode::eFillRectangleNV : return "FillRectangleNV";
+      default: return "invalid";
+    }
+  }
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
+  {
+    switch ( value )
+    {
+      case PresentModeKHR::eImmediate : return "Immediate";
+      case PresentModeKHR::eMailbox : return "Mailbox";
+      case PresentModeKHR::eFifo : return "Fifo";
+      case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed";
+      case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh";
+      case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh";
+      default: return "invalid";
+    }
+  }
+
+  enum class PrimitiveTopology
+  {
+    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
+  {
+    switch ( value )
+    {
+      case PrimitiveTopology::ePointList : return "PointList";
+      case PrimitiveTopology::eLineList : return "LineList";
+      case PrimitiveTopology::eLineStrip : return "LineStrip";
+      case PrimitiveTopology::eTriangleList : return "TriangleList";
+      case PrimitiveTopology::eTriangleStrip : return "TriangleStrip";
+      case PrimitiveTopology::eTriangleFan : return "TriangleFan";
+      case PrimitiveTopology::eLineListWithAdjacency : return "LineListWithAdjacency";
+      case PrimitiveTopology::eLineStripWithAdjacency : return "LineStripWithAdjacency";
+      case PrimitiveTopology::eTriangleListWithAdjacency : return "TriangleListWithAdjacency";
+      case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency";
+      case PrimitiveTopology::ePatchList : return "PatchList";
+      default: return "invalid";
+    }
+  }
+
+  enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  enum class QueryControlFlagBits : VkQueryControlFlags
+  {
+    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryControlFlagBits::ePrecise : return "Precise";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
+  {
+    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices";
+      case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives";
+      case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives";
+      case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations";
+      case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives";
+      case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches";
+      case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryPoolCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class QueryPoolSamplingModeINTEL
+  {
+    eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
+  {
+    switch ( value )
+    {
+      case QueryPoolSamplingModeINTEL::eManual : return "Manual";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryResultFlagBits : VkQueryResultFlags
+  {
+    e64 = VK_QUERY_RESULT_64_BIT,
+    eWait = VK_QUERY_RESULT_WAIT_BIT,
+    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+    ePartial = VK_QUERY_RESULT_PARTIAL_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryResultFlagBits::e64 : return "64";
+      case QueryResultFlagBits::eWait : return "Wait";
+      case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
+      case QueryResultFlagBits::ePartial : return "Partial";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
+    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+    ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
+    eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
+    eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
+    ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
+    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryType value )
+  {
+    switch ( value )
+    {
+      case QueryType::eOcclusion : return "Occlusion";
+      case QueryType::ePipelineStatistics : return "PipelineStatistics";
+      case QueryType::eTimestamp : return "Timestamp";
+      case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
+      case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR";
+      case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR";
+      case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR";
+      case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueueFlagBits : VkQueueFlags
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+    eProtected = VK_QUEUE_PROTECTED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueueFlagBits::eGraphics : return "Graphics";
+      case QueueFlagBits::eCompute : return "Compute";
+      case QueueFlagBits::eTransfer : return "Transfer";
+      case QueueFlagBits::eSparseBinding : return "SparseBinding";
+      case QueueFlagBits::eProtected : return "Protected";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueueGlobalPriorityEXT
+  {
+    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
+    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
+    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
+    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value )
+  {
+    switch ( value )
+    {
+      case QueueGlobalPriorityEXT::eLow : return "Low";
+      case QueueGlobalPriorityEXT::eMedium : return "Medium";
+      case QueueGlobalPriorityEXT::eHigh : return "High";
+      case QueueGlobalPriorityEXT::eRealtime : return "Realtime";
+      default: return "invalid";
+    }
+  }
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
+  {
+    switch ( value )
+    {
+      case RasterizationOrderAMD::eStrict : return "Strict";
+      case RasterizationOrderAMD::eRelaxed : return "Relaxed";
+      default: return "invalid";
+    }
+  }
+
+  enum class RayTracingShaderGroupTypeKHR
+  {
+    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
+  };
+  using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
+  {
+    switch ( value )
+    {
+      case RayTracingShaderGroupTypeKHR::eGeneral : return "General";
+      case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup";
+      case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup";
+      default: return "invalid";
+    }
+  }
+
+  enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
+  {
+    eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case RenderPassCreateFlagBits::eTransformQCOM : return "TransformQCOM";
+      default: return "invalid";
+    }
+  }
+
+  enum class ResolveModeFlagBits : VkResolveModeFlags
+  {
+    eNone = VK_RESOLVE_MODE_NONE,
+    eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+    eAverage = VK_RESOLVE_MODE_AVERAGE_BIT,
+    eMin = VK_RESOLVE_MODE_MIN_BIT,
+    eMax = VK_RESOLVE_MODE_MAX_BIT
+  };
+  using ResolveModeFlagBitsKHR = ResolveModeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ResolveModeFlagBits::eNone : return "None";
+      case ResolveModeFlagBits::eSampleZero : return "SampleZero";
+      case ResolveModeFlagBits::eAverage : return "Average";
+      case ResolveModeFlagBits::eMin : return "Min";
+      case ResolveModeFlagBits::eMax : return "Max";
+      default: return "invalid";
+    }
+  }
+
+  enum class Result
+  {
+    eSuccess = VK_SUCCESS,
+    eNotReady = VK_NOT_READY,
+    eTimeout = VK_TIMEOUT,
+    eEventSet = VK_EVENT_SET,
+    eEventReset = VK_EVENT_RESET,
+    eIncomplete = VK_INCOMPLETE,
+    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+    eErrorUnknown = VK_ERROR_UNKNOWN,
+    eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+    eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    eErrorFragmentation = VK_ERROR_FRAGMENTATION,
+    eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+    eErrorIncompatibleVersionKHR = VK_ERROR_INCOMPATIBLE_VERSION_KHR,
+    eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
+    eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
+    eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
+    eThreadIdleKHR = VK_THREAD_IDLE_KHR,
+    eThreadDoneKHR = VK_THREAD_DONE_KHR,
+    eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR,
+    eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR,
+    ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT,
+    eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
+    eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
+    eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
+    eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
+    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
+    eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( Result value )
+  {
+    switch ( value )
+    {
+      case Result::eSuccess : return "Success";
+      case Result::eNotReady : return "NotReady";
+      case Result::eTimeout : return "Timeout";
+      case Result::eEventSet : return "EventSet";
+      case Result::eEventReset : return "EventReset";
+      case Result::eIncomplete : return "Incomplete";
+      case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory";
+      case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory";
+      case Result::eErrorInitializationFailed : return "ErrorInitializationFailed";
+      case Result::eErrorDeviceLost : return "ErrorDeviceLost";
+      case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed";
+      case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent";
+      case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent";
+      case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent";
+      case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver";
+      case Result::eErrorTooManyObjects : return "ErrorTooManyObjects";
+      case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported";
+      case Result::eErrorFragmentedPool : return "ErrorFragmentedPool";
+      case Result::eErrorUnknown : return "ErrorUnknown";
+      case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory";
+      case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle";
+      case Result::eErrorFragmentation : return "ErrorFragmentation";
+      case Result::eErrorInvalidOpaqueCaptureAddress : return "ErrorInvalidOpaqueCaptureAddress";
+      case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR";
+      case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR";
+      case Result::eSuboptimalKHR : return "SuboptimalKHR";
+      case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR";
+      case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR";
+      case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT";
+      case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV";
+      case Result::eErrorIncompatibleVersionKHR : return "ErrorIncompatibleVersionKHR";
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
+      case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT";
+      case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT";
+      case Result::eThreadIdleKHR : return "ThreadIdleKHR";
+      case Result::eThreadDoneKHR : return "ThreadDoneKHR";
+      case Result::eOperationDeferredKHR : return "OperationDeferredKHR";
+      case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR";
+      case Result::ePipelineCompileRequiredEXT : return "PipelineCompileRequiredEXT";
+      default: return "invalid";
+    }
+  }
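+
+  // Minimal usage sketch (hypothetical helper, not emitted by the registry generator):
+  // Result mirrors VkResult value-for-value, so a raw C-API result can be converted and
+  // pretty-printed with the to_string overload above.
+  VULKAN_HPP_INLINE std::string describeResult( VkResult raw )
+  {
+    return to_string( static_cast<Result>( raw ) );
+  }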
+
+  enum class SampleCountFlagBits : VkSampleCountFlags
+  {
+    e1 = VK_SAMPLE_COUNT_1_BIT,
+    e2 = VK_SAMPLE_COUNT_2_BIT,
+    e4 = VK_SAMPLE_COUNT_4_BIT,
+    e8 = VK_SAMPLE_COUNT_8_BIT,
+    e16 = VK_SAMPLE_COUNT_16_BIT,
+    e32 = VK_SAMPLE_COUNT_32_BIT,
+    e64 = VK_SAMPLE_COUNT_64_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
+  {
+    switch ( value )
+    {
+      case SampleCountFlagBits::e1 : return "1";
+      case SampleCountFlagBits::e2 : return "2";
+      case SampleCountFlagBits::e4 : return "4";
+      case SampleCountFlagBits::e8 : return "8";
+      case SampleCountFlagBits::e16 : return "16";
+      case SampleCountFlagBits::e32 : return "32";
+      case SampleCountFlagBits::e64 : return "64";
+      default: return "invalid";
+    }
+  }
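+
+  // Minimal sketch (hypothetical helper, assuming the usual single-bit VkSampleCountFlagBits
+  // values): maps a requested integer sample count onto the enumerators above, falling back
+  // to a single sample for unsupported counts.
+  VULKAN_HPP_INLINE SampleCountFlagBits sampleCountFromInt( uint32_t samples )
+  {
+    switch ( samples )
+    {
+      case 2 : return SampleCountFlagBits::e2;
+      case 4 : return SampleCountFlagBits::e4;
+      case 8 : return SampleCountFlagBits::e8;
+      case 16 : return SampleCountFlagBits::e16;
+      case 32 : return SampleCountFlagBits::e32;
+      case 64 : return SampleCountFlagBits::e64;
+      default: return SampleCountFlagBits::e1;
+    }
+  }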
+
+  enum class SamplerAddressMode
+  {
+    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
+  {
+    switch ( value )
+    {
+      case SamplerAddressMode::eRepeat : return "Repeat";
+      case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat";
+      case SamplerAddressMode::eClampToEdge : return "ClampToEdge";
+      case SamplerAddressMode::eClampToBorder : return "ClampToBorder";
+      case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerCreateFlagBits : VkSamplerCreateFlags
+  {
+    eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
+    eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+      case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerMipmapMode
+  {
+    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
+  {
+    switch ( value )
+    {
+      case SamplerMipmapMode::eNearest : return "Nearest";
+      case SamplerMipmapMode::eLinear : return "Linear";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerReductionMode
+  {
+    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+    eMin = VK_SAMPLER_REDUCTION_MODE_MIN,
+    eMax = VK_SAMPLER_REDUCTION_MODE_MAX
+  };
+  using SamplerReductionModeEXT = SamplerReductionMode;
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
+  {
+    switch ( value )
+    {
+      case SamplerReductionMode::eWeightedAverage : return "WeightedAverage";
+      case SamplerReductionMode::eMin : return "Min";
+      case SamplerReductionMode::eMax : return "Max";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerYcbcrModelConversion
+  {
+    eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
+  };
+  using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity";
+      case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity";
+      case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709";
+      case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601";
+      case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerYcbcrRange
+  {
+    eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
+  };
+  using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrRange::eItuFull : return "ItuFull";
+      case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow";
+      default: return "invalid";
+    }
+  }
+
+  enum class ScopeNV
+  {
+    eDevice = VK_SCOPE_DEVICE_NV,
+    eWorkgroup = VK_SCOPE_WORKGROUP_NV,
+    eSubgroup = VK_SCOPE_SUBGROUP_NV,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
+  {
+    switch ( value )
+    {
+      case ScopeNV::eDevice : return "Device";
+      case ScopeNV::eWorkgroup : return "Workgroup";
+      case ScopeNV::eSubgroup : return "Subgroup";
+      case ScopeNV::eQueueFamily : return "QueueFamily";
+      default: return "invalid";
+    }
+  }
+
+  enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
+  {
+    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+  };
+  using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreImportFlagBits::eTemporary : return "Temporary";
+      default: return "invalid";
+    }
+  }
+
+  enum class SemaphoreType
+  {
+    eBinary = VK_SEMAPHORE_TYPE_BINARY,
+    eTimeline = VK_SEMAPHORE_TYPE_TIMELINE
+  };
+  using SemaphoreTypeKHR = SemaphoreType;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
+  {
+    switch ( value )
+    {
+      case SemaphoreType::eBinary : return "Binary";
+      case SemaphoreType::eTimeline : return "Timeline";
+      default: return "invalid";
+    }
+  }
+
+  enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
+  {
+    eAny = VK_SEMAPHORE_WAIT_ANY_BIT
+  };
+  using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreWaitFlagBits::eAny : return "Any";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  enum class ShaderFloatControlsIndependence
+  {
+    e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+    eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
+    eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE
+  };
+  using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
+  {
+    switch ( value )
+    {
+      case ShaderFloatControlsIndependence::e32BitOnly : return "32BitOnly";
+      case ShaderFloatControlsIndependence::eAll : return "All";
+      case ShaderFloatControlsIndependence::eNone : return "None";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderInfoTypeAMD
+  {
+    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
+  {
+    switch ( value )
+    {
+      case ShaderInfoTypeAMD::eStatistics : return "Statistics";
+      case ShaderInfoTypeAMD::eBinary : return "Binary";
+      case ShaderInfoTypeAMD::eDisassembly : return "Disassembly";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class ShaderStageFlagBits : VkShaderStageFlags
+  {
+    eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+    eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+    eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+    eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+    eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+    eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+    eAll = VK_SHADER_STAGE_ALL,
+    eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
+    eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
+    eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
+    eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR,
+    eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
+    eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
+    eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
+    eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
+    eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
+    eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
+    eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
+    eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
+    eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
+    eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ShaderStageFlagBits::eVertex : return "Vertex";
+      case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
+      case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
+      case ShaderStageFlagBits::eGeometry : return "Geometry";
+      case ShaderStageFlagBits::eFragment : return "Fragment";
+      case ShaderStageFlagBits::eCompute : return "Compute";
+      case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
+      case ShaderStageFlagBits::eAll : return "All";
+      case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR";
+      case ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR";
+      case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR";
+      case ShaderStageFlagBits::eMissKHR : return "MissKHR";
+      case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR";
+      case ShaderStageFlagBits::eCallableKHR : return "CallableKHR";
+      case ShaderStageFlagBits::eTaskNV : return "TaskNV";
+      case ShaderStageFlagBits::eMeshNV : return "MeshNV";
+      default: return "invalid";
+    }
+  }
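+
+  // Illustrative check (an assumption added here, not part of the generated output; it relies
+  // on sitting in the same scope as the enum): the *NV ray-tracing stages alias the *KHR
+  // values, which is why the switch in to_string above needs no duplicate cases for them.
+  static_assert( ShaderStageFlagBits::eRaygenNV == ShaderStageFlagBits::eRaygenKHR,
+                 "NV ray-tracing stage bits alias the KHR stage bits" );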
+
+  enum class ShadingRatePaletteEntryNV
+  {
+    eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
+    e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
+    e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
+    e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
+    e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
+    e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
+    e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
+    e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
+    e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
+    e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
+    e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
+    e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
+  {
+    switch ( value )
+    {
+      case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations";
+      case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
+      default: return "invalid";
+    }
+  }
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SharingMode value )
+  {
+    switch ( value )
+    {
+      case SharingMode::eExclusive : return "Exclusive";
+      case SharingMode::eConcurrent : return "Concurrent";
+      default: return "invalid";
+    }
+  }
+
+  enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
+  {
+    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
+      case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
+      case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
+      default: return "invalid";
+    }
+  }
+
+  enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
+  {
+    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
+      default: return "invalid";
+    }
+  }
+
+  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
+  {
+    switch ( value )
+    {
+      case StencilFaceFlagBits::eFront : return "Front";
+      case StencilFaceFlagBits::eBack : return "Back";
+      case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack";
+      default: return "invalid";
+    }
+  }
+
+  enum class StencilOp
+  {
+    eKeep = VK_STENCIL_OP_KEEP,
+    eZero = VK_STENCIL_OP_ZERO,
+    eReplace = VK_STENCIL_OP_REPLACE,
+    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+    eInvert = VK_STENCIL_OP_INVERT,
+    eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+    eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( StencilOp value )
+  {
+    switch ( value )
+    {
+      case StencilOp::eKeep : return "Keep";
+      case StencilOp::eZero : return "Zero";
+      case StencilOp::eReplace : return "Replace";
+      case StencilOp::eIncrementAndClamp : return "IncrementAndClamp";
+      case StencilOp::eDecrementAndClamp : return "DecrementAndClamp";
+      case StencilOp::eInvert : return "Invert";
+      case StencilOp::eIncrementAndWrap : return "IncrementAndWrap";
+      case StencilOp::eDecrementAndWrap : return "DecrementAndWrap";
+      default: return "invalid";
+    }
+  }
 
   enum class StructureType
   {
@@ -9189,128 +8133,119 @@
     eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
     ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
     eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
-    eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
     eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
-    eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
     ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
-    ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
     eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
-    eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
     eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
-    eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
     eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
-    eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
     eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
-    eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
     eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
-    eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
     eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
-    eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
     eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
-    eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
     eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
-    eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
     eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
-    eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
     ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
-    ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
     eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
-    eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
     eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
-    eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
     eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
-    eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
     eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
-    eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
     eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
-    eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
     eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
-    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
     ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
-    ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
     ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
-    ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
     eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
-    eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
     eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
-    eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
     ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
-    ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
     eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
-    eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
     ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
-    ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
     eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
-    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
     ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
-    ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
     ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
-    ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
     eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
-    eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
     eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
-    eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
     ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
-    ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
     eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
-    eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
     ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
-    ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
     ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
-    ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
-    ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
-    ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+    ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
     eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
     ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
     ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
     eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
     eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
-    eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
     eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
-    eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
     eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
-    eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
     eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
-    eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
     ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
-    ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
     eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
-    eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
     eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
-    eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
     ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
-    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
     eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
-    eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
     ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
-    ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
     eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
-    eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
     ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
-    ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
     eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
-    eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
     eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
-    eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
     eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
-    eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
     ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
-    ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
     eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
-    eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
     eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
-    eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
     eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
-    eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
     ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
-    ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
     eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
-    eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
     ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
-    ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
     eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
-    eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
-    ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+    ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
+    ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
+    ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
+    ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
+    eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+    eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+    eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+    eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+    eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+    eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+    eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+    eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
+    ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+    ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+    ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+    ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+    ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+    eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+    ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+    ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+    eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+    eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+    ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+    eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+    ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+    eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+    ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+    eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
+    ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+    ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+    eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+    eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+    eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+    ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+    ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+    ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+    eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+    eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+    ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+    ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+    ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+    eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+    eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+    eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+    eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+    ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+    eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+    eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+    eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
     eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
     ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
     eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
@@ -9328,7 +8263,6 @@
     eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
     eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
     eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
-    eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
     ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
     eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
     eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
@@ -9339,7 +8273,10 @@
     ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
     ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
     ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
+    eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
+    eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
     eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+    eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
     ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
     eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
     eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
@@ -9348,6 +8285,7 @@
     eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
     eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
     eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
+    ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
     eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
     ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
     eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
@@ -9369,12 +8307,6 @@
     ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
     eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
     ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
-    eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
-    eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
-    eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
-    eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
-    eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
-    eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
     ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
     eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
     eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
@@ -9388,20 +8320,22 @@
     ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
     ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
     ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
+    ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
     eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
-    eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
-    eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
-    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
-    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
-    eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
-    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
-    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
     eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
     eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
     eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
     eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
     eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
     eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+    ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
+    ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
+    eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
+    ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
+    eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
+    ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
+    ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
     ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
     eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
     eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
@@ -9423,8 +8357,6 @@
     eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
     eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
     eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
-    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
-    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
     ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
     ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
     eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
@@ -9434,25 +8366,42 @@
     ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
     ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
     eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
-    eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
     ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
     ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
     ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
     ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+    eBindAccelerationStructureMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR,
+    eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
+    eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
+    eAccelerationStructureCreateGeometryTypeInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR,
+    eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
+    eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
+    eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
+    eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
+    eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
+    eAccelerationStructureMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR,
+    eAccelerationStructureVersionKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR,
+    eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
+    eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
+    eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
+    ePhysicalDeviceRayTracingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR,
+    ePhysicalDeviceRayTracingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR,
+    eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
+    eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
+    eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
+    eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
     ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
+    ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
     eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
-    eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
     ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
     eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
     eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
     eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
     eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
     eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
-    eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
-    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
-    ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
-    eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
-    eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+    ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR,
+    ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR,
     ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
     ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
     ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
@@ -9462,27 +8411,28 @@
     eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
     eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
     eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
-    eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
-    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
     eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
     ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
     eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
     eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
     ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
     ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
+    eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
     eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
-    ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
     eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
     eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
     ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
-    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+    ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
+    ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
     eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
     ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
     eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
     ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
     ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
     ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
-    ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+    ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
+    ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
     ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
     ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
     ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
@@ -9492,10867 +8442,7344 @@
     ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
     eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
     eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
-    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+    ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
+    eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
+    eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
+    ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
+    ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
+    ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
+    ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
     ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
-    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA
+    eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
+    eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
+    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
+    ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR,
+    eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
+    ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
+    eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
+    ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
+    ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
+    ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
+    eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
+    ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR,
+    ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR,
+    ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
+    ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR,
+    ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
+    ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
+    ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
+    ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
+    ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
+    eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
+    eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
+    ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
+    ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
+    ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
+    eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
+    ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
+    eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
+    ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
+    eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
+    ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
+    ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
+    eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
+    eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
+    eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
+    eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
+    ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
+    ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
+    ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
+    ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
+    ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
+    eDeferredOperationInfoKHR = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR,
+    ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
+    ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+    ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
+    ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
+    ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
+    ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
+    ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
+    ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
+    eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
+    eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
+    eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
+    eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
+    eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
+    eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
+    ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
+    ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
+    ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
+    eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
+    eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
+    ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT,
+    eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT,
+    eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT,
+    ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
+    ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
+    eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
+    ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
+    ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
+    ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
+    ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
+    eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
+    ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
+    ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
+    ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
+    eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
+    ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
+    ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
+    eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
+    eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
+    eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
+    eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
+    eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
+    eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
+    eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
+    eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
+    eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
+    eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
+    eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
+    ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
+    eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
+    eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+    eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+    eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+    eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+    eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
+    eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
+    eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
+    eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
+    eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+    eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
+    eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+    eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+    eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+    eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
+    eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+    eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
+    eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+    eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+    eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+    eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
+    eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
+    eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
+    eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
+    eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
+    eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
+    eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
+    eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
+    eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
+    eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
+    eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
+    eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
+    eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+    eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
+    eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
+    eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
+    eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
+    eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
+    eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+    eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
+    eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+    eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
+    eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
+    eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+    eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
+    eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
+    eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
+    ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+    ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+    ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
+    ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
+    ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
+    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+    ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+    ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+    ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+    ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
+    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
+    ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
+    ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
+    ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
+    ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
+    ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
+    ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
+    ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+    ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+    ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
+    ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
+    ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
+    ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
+    ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
+    ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
+    ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+    ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
+    ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+    ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
+    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+    ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+    ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+    ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
+    ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
+    ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
+    ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
+    ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
+    ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+    ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+    ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
+    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+    ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
+    eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+    eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
+    eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+    eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+    eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
+    eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
+    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+    eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
+    eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
+    eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
+    eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
+    eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
+    eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+    eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
+    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV
   };
 
-  struct ApplicationInfo
+  VULKAN_HPP_INLINE std::string to_string( StructureType value )
   {
-    ApplicationInfo( const char* pApplicationName_ = nullptr,
-                     uint32_t applicationVersion_ = 0,
-                     const char* pEngineName_ = nullptr,
-                     uint32_t engineVersion_ = 0,
-                     uint32_t apiVersion_ = 0 )
-      : pApplicationName( pApplicationName_ )
-      , applicationVersion( applicationVersion_ )
-      , pEngineName( pEngineName_ )
-      , engineVersion( engineVersion_ )
-      , apiVersion( apiVersion_ )
+    switch ( value )
     {
+      case StructureType::eApplicationInfo : return "ApplicationInfo";
+      case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo";
+      case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo";
+      case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo";
+      case StructureType::eSubmitInfo : return "SubmitInfo";
+      case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo";
+      case StructureType::eMappedMemoryRange : return "MappedMemoryRange";
+      case StructureType::eBindSparseInfo : return "BindSparseInfo";
+      case StructureType::eFenceCreateInfo : return "FenceCreateInfo";
+      case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo";
+      case StructureType::eEventCreateInfo : return "EventCreateInfo";
+      case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo";
+      case StructureType::eBufferCreateInfo : return "BufferCreateInfo";
+      case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo";
+      case StructureType::eImageCreateInfo : return "ImageCreateInfo";
+      case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo";
+      case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo";
+      case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo";
+      case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo";
+      case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo";
+      case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo";
+      case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo";
+      case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo";
+      case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo";
+      case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo";
+      case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo";
+      case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo";
+      case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo";
+      case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo";
+      case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo";
+      case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo";
+      case StructureType::eSamplerCreateInfo : return "SamplerCreateInfo";
+      case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo";
+      case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo";
+      case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo";
+      case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet";
+      case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet";
+      case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo";
+      case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo";
+      case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo";
+      case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo";
+      case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo";
+      case StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo";
+      case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo";
+      case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier";
+      case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier";
+      case StructureType::eMemoryBarrier : return "MemoryBarrier";
+      case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo";
+      case StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo";
+      case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties";
+      case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo";
+      case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo";
+      case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures";
+      case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements";
+      case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo";
+      case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo";
+      case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo";
+      case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo";
+      case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo";
+      case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo";
+      case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo";
+      case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo";
+      case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties";
+      case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo";
+      case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2";
+      case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2";
+      case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2";
+      case StructureType::eMemoryRequirements2 : return "MemoryRequirements2";
+      case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2";
+      case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2";
+      case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2";
+      case StructureType::eFormatProperties2 : return "FormatProperties2";
+      case StructureType::eImageFormatProperties2 : return "ImageFormatProperties2";
+      case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2";
+      case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2";
+      case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2";
+      case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2";
+      case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2";
+      case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties";
+      case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return "RenderPassInputAttachmentAspectCreateInfo";
+      case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo";
+      case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : return "PipelineTessellationDomainOriginStateCreateInfo";
+      case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo";
+      case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures";
+      case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties";
+      case StructureType::ePhysicalDeviceVariablePointersFeatures : return "PhysicalDeviceVariablePointersFeatures";
+      case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo";
+      case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures";
+      case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties";
+      case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2";
+      case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo";
+      case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo";
+      case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo";
+      case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo";
+      case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures";
+      case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties";
+      case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo";
+      case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo";
+      case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties";
+      case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo";
+      case StructureType::eExternalBufferProperties : return "ExternalBufferProperties";
+      case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties";
+      case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo";
+      case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo";
+      case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo";
+      case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo";
+      case StructureType::eExternalFenceProperties : return "ExternalFenceProperties";
+      case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo";
+      case StructureType::eExportSemaphoreCreateInfo : return "ExportSemaphoreCreateInfo";
+      case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo";
+      case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties";
+      case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties";
+      case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport";
+      case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures";
+      case StructureType::ePhysicalDeviceVulkan11Features : return "PhysicalDeviceVulkan11Features";
+      case StructureType::ePhysicalDeviceVulkan11Properties : return "PhysicalDeviceVulkan11Properties";
+      case StructureType::ePhysicalDeviceVulkan12Features : return "PhysicalDeviceVulkan12Features";
+      case StructureType::ePhysicalDeviceVulkan12Properties : return "PhysicalDeviceVulkan12Properties";
+      case StructureType::eImageFormatListCreateInfo : return "ImageFormatListCreateInfo";
+      case StructureType::eAttachmentDescription2 : return "AttachmentDescription2";
+      case StructureType::eAttachmentReference2 : return "AttachmentReference2";
+      case StructureType::eSubpassDescription2 : return "SubpassDescription2";
+      case StructureType::eSubpassDependency2 : return "SubpassDependency2";
+      case StructureType::eRenderPassCreateInfo2 : return "RenderPassCreateInfo2";
+      case StructureType::eSubpassBeginInfo : return "SubpassBeginInfo";
+      case StructureType::eSubpassEndInfo : return "SubpassEndInfo";
+      case StructureType::ePhysicalDevice8BitStorageFeatures : return "PhysicalDevice8BitStorageFeatures";
+      case StructureType::ePhysicalDeviceDriverProperties : return "PhysicalDeviceDriverProperties";
+      case StructureType::ePhysicalDeviceShaderAtomicInt64Features : return "PhysicalDeviceShaderAtomicInt64Features";
+      case StructureType::ePhysicalDeviceShaderFloat16Int8Features : return "PhysicalDeviceShaderFloat16Int8Features";
+      case StructureType::ePhysicalDeviceFloatControlsProperties : return "PhysicalDeviceFloatControlsProperties";
+      case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo : return "DescriptorSetLayoutBindingFlagsCreateInfo";
+      case StructureType::ePhysicalDeviceDescriptorIndexingFeatures : return "PhysicalDeviceDescriptorIndexingFeatures";
+      case StructureType::ePhysicalDeviceDescriptorIndexingProperties : return "PhysicalDeviceDescriptorIndexingProperties";
+      case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo : return "DescriptorSetVariableDescriptorCountAllocateInfo";
+      case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport : return "DescriptorSetVariableDescriptorCountLayoutSupport";
+      case StructureType::ePhysicalDeviceDepthStencilResolveProperties : return "PhysicalDeviceDepthStencilResolveProperties";
+      case StructureType::eSubpassDescriptionDepthStencilResolve : return "SubpassDescriptionDepthStencilResolve";
+      case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures : return "PhysicalDeviceScalarBlockLayoutFeatures";
+      case StructureType::eImageStencilUsageCreateInfo : return "ImageStencilUsageCreateInfo";
+      case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties : return "PhysicalDeviceSamplerFilterMinmaxProperties";
+      case StructureType::eSamplerReductionModeCreateInfo : return "SamplerReductionModeCreateInfo";
+      case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures : return "PhysicalDeviceVulkanMemoryModelFeatures";
+      case StructureType::ePhysicalDeviceImagelessFramebufferFeatures : return "PhysicalDeviceImagelessFramebufferFeatures";
+      case StructureType::eFramebufferAttachmentsCreateInfo : return "FramebufferAttachmentsCreateInfo";
+      case StructureType::eFramebufferAttachmentImageInfo : return "FramebufferAttachmentImageInfo";
+      case StructureType::eRenderPassAttachmentBeginInfo : return "RenderPassAttachmentBeginInfo";
+      case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures : return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
+      case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures : return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
+      case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures : return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
+      case StructureType::eAttachmentReferenceStencilLayout : return "AttachmentReferenceStencilLayout";
+      case StructureType::eAttachmentDescriptionStencilLayout : return "AttachmentDescriptionStencilLayout";
+      case StructureType::ePhysicalDeviceHostQueryResetFeatures : return "PhysicalDeviceHostQueryResetFeatures";
+      case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures : return "PhysicalDeviceTimelineSemaphoreFeatures";
+      case StructureType::ePhysicalDeviceTimelineSemaphoreProperties : return "PhysicalDeviceTimelineSemaphoreProperties";
+      case StructureType::eSemaphoreTypeCreateInfo : return "SemaphoreTypeCreateInfo";
+      case StructureType::eTimelineSemaphoreSubmitInfo : return "TimelineSemaphoreSubmitInfo";
+      case StructureType::eSemaphoreWaitInfo : return "SemaphoreWaitInfo";
+      case StructureType::eSemaphoreSignalInfo : return "SemaphoreSignalInfo";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures : return "PhysicalDeviceBufferDeviceAddressFeatures";
+      case StructureType::eBufferDeviceAddressInfo : return "BufferDeviceAddressInfo";
+      case StructureType::eBufferOpaqueCaptureAddressCreateInfo : return "BufferOpaqueCaptureAddressCreateInfo";
+      case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo : return "MemoryOpaqueCaptureAddressAllocateInfo";
+      case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo : return "DeviceMemoryOpaqueCaptureAddressInfo";
+      case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR";
+      case StructureType::ePresentInfoKHR : return "PresentInfoKHR";
+      case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR";
+      case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR";
+      case StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR";
+      case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR";
+      case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR";
+      case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR";
+      case StructureType::eDisplayModeCreateInfoKHR : return "DisplayModeCreateInfoKHR";
+      case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR";
+      case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR";
+      case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR";
+      case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR";
+      case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR";
+      case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR";
+      case StructureType::eWin32SurfaceCreateInfoKHR : return "Win32SurfaceCreateInfoKHR";
+      case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT";
+      case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD";
+      case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT";
+      case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT";
+      case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT";
+      case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV";
+      case StructureType::eDedicatedAllocationBufferCreateInfoNV : return "DedicatedAllocationBufferCreateInfoNV";
+      case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV";
+      case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT";
+      case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT";
+      case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT";
+      case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX";
+      case StructureType::eImageViewAddressPropertiesNVX : return "ImageViewAddressPropertiesNVX";
+      case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD";
+      case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP";
+      case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV";
+      case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV";
+      case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV";
+      case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV";
+      case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV";
+      case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV";
+      case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT";
+      case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN";
+      case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT : return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT";
+      case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT";
+      case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT";
+      case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR";
+      case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR";
+      case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR";
+      case StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR";
+      case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR";
+      case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR";
+      case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR";
+      case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR";
+      case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR";
+      case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR";
+      case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR";
+      case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR";
+      case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR";
+      case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT";
+      case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT";
+      case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT";
+      case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR";
+      case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV";
+      case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT";
+      case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT";
+      case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT";
+      case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT";
+      case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT";
+      case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE";
+      case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
+      case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT";
+      case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
+      case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT";
+      case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT";
+      case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT";
+      case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR";
+      case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR";
+      case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR";
+      case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR";
+      case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR";
+      case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR";
+      case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR : return "PhysicalDevicePerformanceQueryFeaturesKHR";
+      case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR : return "PhysicalDevicePerformanceQueryPropertiesKHR";
+      case StructureType::eQueryPoolPerformanceCreateInfoKHR : return "QueryPoolPerformanceCreateInfoKHR";
+      case StructureType::ePerformanceQuerySubmitInfoKHR : return "PerformanceQuerySubmitInfoKHR";
+      case StructureType::eAcquireProfilingLockInfoKHR : return "AcquireProfilingLockInfoKHR";
+      case StructureType::ePerformanceCounterKHR : return "PerformanceCounterKHR";
+      case StructureType::ePerformanceCounterDescriptionKHR : return "PerformanceCounterDescriptionKHR";
+      case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR";
+      case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR";
+      case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR";
+      case StructureType::eDisplayProperties2KHR : return "DisplayProperties2KHR";
+      case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR";
+      case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR";
+      case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR";
+      case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR";
+      case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK";
+      case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK";
+      case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT";
+      case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT";
+      case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT";
+      case StructureType::eDebugUtilsMessengerCallbackDataEXT : return "DebugUtilsMessengerCallbackDataEXT";
+      case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT";
+      case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID";
+      case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID";
+      case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID";
+      case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID";
+      case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID";
+      case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID";
+      case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT";
+      case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT";
+      case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT";
+      case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT : return "DescriptorPoolInlineUniformBlockCreateInfoEXT";
+      case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT";
+      case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT";
+      case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT";
+      case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
+      case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT";
+      case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV";
+      case StructureType::eBindAccelerationStructureMemoryInfoKHR : return "BindAccelerationStructureMemoryInfoKHR";
+      case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR";
+      case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR";
+      case StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR : return "AccelerationStructureCreateGeometryTypeInfoKHR";
+      case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR";
+      case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR";
+      case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR";
+      case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR";
+      case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR";
+      case StructureType::eAccelerationStructureMemoryRequirementsInfoKHR : return "AccelerationStructureMemoryRequirementsInfoKHR";
+      case StructureType::eAccelerationStructureVersionKHR : return "AccelerationStructureVersionKHR";
+      case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR";
+      case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR";
+      case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR";
+      case StructureType::ePhysicalDeviceRayTracingFeaturesKHR : return "PhysicalDeviceRayTracingFeaturesKHR";
+      case StructureType::ePhysicalDeviceRayTracingPropertiesKHR : return "PhysicalDeviceRayTracingPropertiesKHR";
+      case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR";
+      case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR";
+      case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR";
+      case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR";
+      case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
+      case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT";
+      case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
+      case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return "ImageDrmFormatModifierExplicitCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT";
+      case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT";
+      case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT";
+      case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR : return "PhysicalDevicePortabilitySubsetFeaturesKHR";
+      case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR : return "PhysicalDevicePortabilitySubsetPropertiesKHR";
+      case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV";
+      case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV";
+      case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
+      case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV";
+      case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV";
+      case StructureType::eGeometryNV : return "GeometryNV";
+      case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV";
+      case StructureType::eGeometryAabbNV : return "GeometryAabbNV";
+      case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV";
+      case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV";
+      case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV";
+      case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV";
+      case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
+      case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT";
+      case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT";
+      case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT";
+      case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT";
+      case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT";
+      case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
+      case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR";
+      case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD";
+      case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT";
+      case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD";
+      case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
+      case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
+      case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP";
+      case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT";
+      case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
+      case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV";
+      case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV";
+      case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV : return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV";
+      case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV";
+      case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV";
+      case StructureType::eCheckpointDataNV : return "CheckpointDataNV";
+      case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV";
+      case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
+      case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL : return "QueryPoolPerformanceQueryCreateInfoINTEL";
+      case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL";
+      case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL";
+      case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL";
+      case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL";
+      case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL";
+      case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT";
+      case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD";
+      case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD";
+      case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA";
+      case StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR : return "PhysicalDeviceShaderTerminateInvocationFeaturesKHR";
+      case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
+      case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT";
+      case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT";
+      case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT";
+      case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT";
+      case StructureType::eFragmentShadingRateAttachmentInfoKHR : return "FragmentShadingRateAttachmentInfoKHR";
+      case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR : return "PipelineFragmentShadingRateStateCreateInfoKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR : return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR : return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRateKHR : return "PhysicalDeviceFragmentShadingRateKHR";
+      case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD";
+      case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD";
+      case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT : return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
+      case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT";
+      case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT";
+      case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT";
+      case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR";
+      case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
+      case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT";
+      case StructureType::ePhysicalDeviceToolPropertiesEXT : return "PhysicalDeviceToolPropertiesEXT";
+      case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT";
+      case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV";
+      case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV";
+      case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV";
+      case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV";
+      case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
+      case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
+      case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT";
+      case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT";
+      case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT";
+      case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT";
+      case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT";
+      case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT";
+      case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT : return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
+      case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
+      case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT : return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
+      case StructureType::eDeferredOperationInfoKHR : return "DeferredOperationInfoKHR";
+      case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
+      case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR";
+      case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR";
+      case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR";
+      case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR";
+      case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR";
+      case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT";
+      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
+      case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV";
+      case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV";
+      case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV";
+      case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV";
+      case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV";
+      case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV";
+      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT";
+      case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
+      case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM";
+      case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT : return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
+      case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT : return "DeviceDeviceMemoryReportCreateInfoEXT";
+      case StructureType::eDeviceMemoryReportCallbackDataEXT : return "DeviceMemoryReportCallbackDataEXT";
+      case StructureType::ePhysicalDeviceRobustness2FeaturesEXT : return "PhysicalDeviceRobustness2FeaturesEXT";
+      case StructureType::ePhysicalDeviceRobustness2PropertiesEXT : return "PhysicalDeviceRobustness2PropertiesEXT";
+      case StructureType::eSamplerCustomBorderColorCreateInfoEXT : return "SamplerCustomBorderColorCreateInfoEXT";
+      case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT : return "PhysicalDeviceCustomBorderColorPropertiesEXT";
+      case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT : return "PhysicalDeviceCustomBorderColorFeaturesEXT";
+      case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR";
+      case StructureType::ePhysicalDevicePrivateDataFeaturesEXT : return "PhysicalDevicePrivateDataFeaturesEXT";
+      case StructureType::eDevicePrivateDataCreateInfoEXT : return "DevicePrivateDataCreateInfoEXT";
+      case StructureType::ePrivateDataSlotCreateInfoEXT : return "PrivateDataSlotCreateInfoEXT";
+      case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT";
+      case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
+      case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV";
+      case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT : return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT : return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
+      case StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT : return "PhysicalDeviceImageRobustnessFeaturesEXT";
+      case StructureType::eCopyBufferInfo2KHR : return "CopyBufferInfo2KHR";
+      case StructureType::eCopyImageInfo2KHR : return "CopyImageInfo2KHR";
+      case StructureType::eCopyBufferToImageInfo2KHR : return "CopyBufferToImageInfo2KHR";
+      case StructureType::eCopyImageToBufferInfo2KHR : return "CopyImageToBufferInfo2KHR";
+      case StructureType::eBlitImageInfo2KHR : return "BlitImageInfo2KHR";
+      case StructureType::eResolveImageInfo2KHR : return "ResolveImageInfo2KHR";
+      case StructureType::eBufferCopy2KHR : return "BufferCopy2KHR";
+      case StructureType::eImageCopy2KHR : return "ImageCopy2KHR";
+      case StructureType::eImageBlit2KHR : return "ImageBlit2KHR";
+      case StructureType::eBufferImageCopy2KHR : return "BufferImageCopy2KHR";
+      case StructureType::eImageResolve2KHR : return "ImageResolve2KHR";
+      case StructureType::ePhysicalDevice4444FormatsFeaturesEXT : return "PhysicalDevice4444FormatsFeaturesEXT";
+      case StructureType::eDirectfbSurfaceCreateInfoEXT : return "DirectfbSurfaceCreateInfoEXT";
+      default: return "invalid";
     }
+  }
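A minimal usage sketch, assuming the default vk namespace alias for VULKAN_HPP_NAMESPACE and nothing beyond what this header already includes: the to_string overloads turn enum values into their readable names, which is convenient for debug logging.

  #include <iostream>

  void logSType( vk::StructureType sType )
  {
    // Prints e.g. "sType = ApplicationInfo"; values outside the switch print "invalid".
    std::cout << "sType = " << vk::to_string( sType ) << std::endl;
  }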
 
-    ApplicationInfo( VkApplicationInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ApplicationInfo ) );
-    }
-
-    ApplicationInfo& operator=( VkApplicationInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ApplicationInfo ) );
-      return *this;
-    }
-    ApplicationInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ApplicationInfo& setPApplicationName( const char* pApplicationName_ )
-    {
-      pApplicationName = pApplicationName_;
-      return *this;
-    }
-
-    ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ )
-    {
-      applicationVersion = applicationVersion_;
-      return *this;
-    }
-
-    ApplicationInfo& setPEngineName( const char* pEngineName_ )
-    {
-      pEngineName = pEngineName_;
-      return *this;
-    }
-
-    ApplicationInfo& setEngineVersion( uint32_t engineVersion_ )
-    {
-      engineVersion = engineVersion_;
-      return *this;
-    }
-
-    ApplicationInfo& setApiVersion( uint32_t apiVersion_ )
-    {
-      apiVersion = apiVersion_;
-      return *this;
-    }
-
-    operator VkApplicationInfo const&() const
-    {
-      return *reinterpret_cast<const VkApplicationInfo*>(this);
-    }
-
-    operator VkApplicationInfo &()
-    {
-      return *reinterpret_cast<VkApplicationInfo*>(this);
-    }
-
-    bool operator==( ApplicationInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pApplicationName == rhs.pApplicationName )
-          && ( applicationVersion == rhs.applicationVersion )
-          && ( pEngineName == rhs.pEngineName )
-          && ( engineVersion == rhs.engineVersion )
-          && ( apiVersion == rhs.apiVersion );
-    }
-
-    bool operator!=( ApplicationInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eApplicationInfo;
-
-  public:
-    const void* pNext = nullptr;
-    const char* pApplicationName;
-    uint32_t applicationVersion;
-    const char* pEngineName;
-    uint32_t engineVersion;
-    uint32_t apiVersion;
-  };
-  static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
-
-  struct InstanceCreateInfo
+  enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
   {
-    InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(),
-                        const ApplicationInfo* pApplicationInfo_ = nullptr,
-                        uint32_t enabledLayerCount_ = 0,
-                        const char* const* ppEnabledLayerNames_ = nullptr,
-                        uint32_t enabledExtensionCount_ = 0,
-                        const char* const* ppEnabledExtensionNames_ = nullptr )
-      : flags( flags_ )
-      , pApplicationInfo( pApplicationInfo_ )
-      , enabledLayerCount( enabledLayerCount_ )
-      , ppEnabledLayerNames( ppEnabledLayerNames_ )
-      , enabledExtensionCount( enabledExtensionCount_ )
-      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
-    {
-    }
-
-    InstanceCreateInfo( VkInstanceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
-    }
-
-    InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
-      return *this;
-    }
-    InstanceCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ )
-    {
-      pApplicationInfo = pApplicationInfo_;
-      return *this;
-    }
-
-    InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
-    {
-      enabledLayerCount = enabledLayerCount_;
-      return *this;
-    }
-
-    InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
-    {
-      ppEnabledLayerNames = ppEnabledLayerNames_;
-      return *this;
-    }
-
-    InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
-    {
-      enabledExtensionCount = enabledExtensionCount_;
-      return *this;
-    }
-
-    InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
-    {
-      ppEnabledExtensionNames = ppEnabledExtensionNames_;
-      return *this;
-    }
-
-    operator VkInstanceCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkInstanceCreateInfo*>(this);
-    }
-
-    operator VkInstanceCreateInfo &()
-    {
-      return *reinterpret_cast<VkInstanceCreateInfo*>(this);
-    }
-
-    bool operator==( InstanceCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pApplicationInfo == rhs.pApplicationInfo )
-          && ( enabledLayerCount == rhs.enabledLayerCount )
-          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
-          && ( enabledExtensionCount == rhs.enabledExtensionCount )
-          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
-    }
-
-    bool operator!=( InstanceCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eInstanceCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    InstanceCreateFlags flags;
-    const ApplicationInfo* pApplicationInfo;
-    uint32_t enabledLayerCount;
-    const char* const* ppEnabledLayerNames;
-    uint32_t enabledExtensionCount;
-    const char* const* ppEnabledExtensionNames;
+    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
   };
-  static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
 
-  struct MemoryAllocateInfo
+  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
   {
-    MemoryAllocateInfo( DeviceSize allocationSize_ = 0,
-                        uint32_t memoryTypeIndex_ = 0 )
-      : allocationSize( allocationSize_ )
-      , memoryTypeIndex( memoryTypeIndex_ )
+    switch ( value )
     {
+      case SubgroupFeatureFlagBits::eBasic : return "Basic";
+      case SubgroupFeatureFlagBits::eVote : return "Vote";
+      case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic";
+      case SubgroupFeatureFlagBits::eBallot : return "Ballot";
+      case SubgroupFeatureFlagBits::eShuffle : return "Shuffle";
+      case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative";
+      case SubgroupFeatureFlagBits::eClustered : return "Clustered";
+      case SubgroupFeatureFlagBits::eQuad : return "Quad";
+      case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV";
+      default: return "invalid";
     }
+  }
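A hedged sketch of how these bits are typically consumed, assuming a valid vk::PhysicalDevice on a Vulkan 1.1 instance; the function name and the chosen feature mask are illustrative only.

  bool subgroupBallotSupported( vk::PhysicalDevice physicalDevice )
  {
    // Chain PhysicalDeviceSubgroupProperties (core since Vulkan 1.1) into the
    // properties query, then check every operation the shader relies on.
    vk::PhysicalDeviceSubgroupProperties subgroupProps;
    vk::PhysicalDeviceProperties2        props2;
    props2.pNext = &subgroupProps;
    physicalDevice.getProperties2( &props2 );

    vk::SubgroupFeatureFlags needed =
      vk::SubgroupFeatureFlagBits::eBasic | vk::SubgroupFeatureFlagBits::eBallot;
    return ( subgroupProps.supportedOperations & needed ) == needed;
  }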
 
-    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) );
-    }
-
-    MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) );
-      return *this;
-    }
-    MemoryAllocateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ )
-    {
-      allocationSize = allocationSize_;
-      return *this;
-    }
-
-    MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ )
-    {
-      memoryTypeIndex = memoryTypeIndex_;
-      return *this;
-    }
-
-    operator VkMemoryAllocateInfo const&() const
-    {
-      return *reinterpret_cast<const VkMemoryAllocateInfo*>(this);
-    }
-
-    operator VkMemoryAllocateInfo &()
-    {
-      return *reinterpret_cast<VkMemoryAllocateInfo*>(this);
-    }
-
-    bool operator==( MemoryAllocateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( allocationSize == rhs.allocationSize )
-          && ( memoryTypeIndex == rhs.memoryTypeIndex );
-    }
-
-    bool operator!=( MemoryAllocateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryAllocateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceSize allocationSize;
-    uint32_t memoryTypeIndex;
-  };
-  static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
-
-  struct MappedMemoryRange
+  enum class SubpassContents
   {
-    MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(),
-                       DeviceSize offset_ = 0,
-                       DeviceSize size_ = 0 )
-      : memory( memory_ )
-      , offset( offset_ )
-      , size( size_ )
-    {
-    }
-
-    MappedMemoryRange( VkMappedMemoryRange const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MappedMemoryRange ) );
-    }
-
-    MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MappedMemoryRange ) );
-      return *this;
-    }
-    MappedMemoryRange& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MappedMemoryRange& setMemory( DeviceMemory memory_ )
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    MappedMemoryRange& setOffset( DeviceSize offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    MappedMemoryRange& setSize( DeviceSize size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    operator VkMappedMemoryRange const&() const
-    {
-      return *reinterpret_cast<const VkMappedMemoryRange*>(this);
-    }
-
-    operator VkMappedMemoryRange &()
-    {
-      return *reinterpret_cast<VkMappedMemoryRange*>(this);
-    }
-
-    bool operator==( MappedMemoryRange const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
-    }
-
-    bool operator!=( MappedMemoryRange const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMappedMemoryRange;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceMemory memory;
-    DeviceSize offset;
-    DeviceSize size;
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
   };
-  static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
 
-  struct WriteDescriptorSet
+  VULKAN_HPP_INLINE std::string to_string( SubpassContents value )
   {
-    WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(),
-                        uint32_t dstBinding_ = 0,
-                        uint32_t dstArrayElement_ = 0,
-                        uint32_t descriptorCount_ = 0,
-                        DescriptorType descriptorType_ = DescriptorType::eSampler,
-                        const DescriptorImageInfo* pImageInfo_ = nullptr,
-                        const DescriptorBufferInfo* pBufferInfo_ = nullptr,
-                        const BufferView* pTexelBufferView_ = nullptr )
-      : dstSet( dstSet_ )
-      , dstBinding( dstBinding_ )
-      , dstArrayElement( dstArrayElement_ )
-      , descriptorCount( descriptorCount_ )
-      , descriptorType( descriptorType_ )
-      , pImageInfo( pImageInfo_ )
-      , pBufferInfo( pBufferInfo_ )
-      , pTexelBufferView( pTexelBufferView_ )
+    switch ( value )
     {
+      case SubpassContents::eInline : return "Inline";
+      case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers";
+      default: return "invalid";
     }
+  }
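A short sketch, assuming an already recording vk::CommandBuffer and a filled-in vk::RenderPassBeginInfo; the SubpassContents value decides whether the subpass is recorded inline or through secondary command buffers.

  void beginMainPass( vk::CommandBuffer cmd, vk::RenderPassBeginInfo const & beginInfo )
  {
    // eInline: draw commands follow directly in this command buffer;
    // eSecondaryCommandBuffers would instead require executeCommands().
    cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
  }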
 
-    WriteDescriptorSet( VkWriteDescriptorSet const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( WriteDescriptorSet ) );
-    }
-
-    WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( WriteDescriptorSet ) );
-      return *this;
-    }
-    WriteDescriptorSet& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ )
-    {
-      dstSet = dstSet_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ )
-    {
-      dstBinding = dstBinding_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
-    {
-      dstArrayElement = dstArrayElement_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
-    {
-      descriptorCount = descriptorCount_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ )
-    {
-      descriptorType = descriptorType_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ )
-    {
-      pImageInfo = pImageInfo_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ )
-    {
-      pBufferInfo = pBufferInfo_;
-      return *this;
-    }
-
-    WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ )
-    {
-      pTexelBufferView = pTexelBufferView_;
-      return *this;
-    }
-
-    operator VkWriteDescriptorSet const&() const
-    {
-      return *reinterpret_cast<const VkWriteDescriptorSet*>(this);
-    }
-
-    operator VkWriteDescriptorSet &()
-    {
-      return *reinterpret_cast<VkWriteDescriptorSet*>(this);
-    }
-
-    bool operator==( WriteDescriptorSet const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dstSet == rhs.dstSet )
-          && ( dstBinding == rhs.dstBinding )
-          && ( dstArrayElement == rhs.dstArrayElement )
-          && ( descriptorCount == rhs.descriptorCount )
-          && ( descriptorType == rhs.descriptorType )
-          && ( pImageInfo == rhs.pImageInfo )
-          && ( pBufferInfo == rhs.pBufferInfo )
-          && ( pTexelBufferView == rhs.pTexelBufferView );
-    }
-
-    bool operator!=( WriteDescriptorSet const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eWriteDescriptorSet;
-
-  public:
-    const void* pNext = nullptr;
-    DescriptorSet dstSet;
-    uint32_t dstBinding;
-    uint32_t dstArrayElement;
-    uint32_t descriptorCount;
-    DescriptorType descriptorType;
-    const DescriptorImageInfo* pImageInfo;
-    const DescriptorBufferInfo* pBufferInfo;
-    const BufferView* pTexelBufferView;
-  };
-  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
-
-  struct CopyDescriptorSet
+  enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
   {
-    CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(),
-                       uint32_t srcBinding_ = 0,
-                       uint32_t srcArrayElement_ = 0,
-                       DescriptorSet dstSet_ = DescriptorSet(),
-                       uint32_t dstBinding_ = 0,
-                       uint32_t dstArrayElement_ = 0,
-                       uint32_t descriptorCount_ = 0 )
-      : srcSet( srcSet_ )
-      , srcBinding( srcBinding_ )
-      , srcArrayElement( srcArrayElement_ )
-      , dstSet( dstSet_ )
-      , dstBinding( dstBinding_ )
-      , dstArrayElement( dstArrayElement_ )
-      , descriptorCount( descriptorCount_ )
-    {
-    }
-
-    CopyDescriptorSet( VkCopyDescriptorSet const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CopyDescriptorSet ) );
-    }
-
-    CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CopyDescriptorSet ) );
-      return *this;
-    }
-    CopyDescriptorSet& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ )
-    {
-      srcSet = srcSet_;
-      return *this;
-    }
-
-    CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ )
-    {
-      srcBinding = srcBinding_;
-      return *this;
-    }
-
-    CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ )
-    {
-      srcArrayElement = srcArrayElement_;
-      return *this;
-    }
-
-    CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ )
-    {
-      dstSet = dstSet_;
-      return *this;
-    }
-
-    CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ )
-    {
-      dstBinding = dstBinding_;
-      return *this;
-    }
-
-    CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
-    {
-      dstArrayElement = dstArrayElement_;
-      return *this;
-    }
-
-    CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
-    {
-      descriptorCount = descriptorCount_;
-      return *this;
-    }
-
-    operator VkCopyDescriptorSet const&() const
-    {
-      return *reinterpret_cast<const VkCopyDescriptorSet*>(this);
-    }
-
-    operator VkCopyDescriptorSet &()
-    {
-      return *reinterpret_cast<VkCopyDescriptorSet*>(this);
-    }
-
-    bool operator==( CopyDescriptorSet const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcSet == rhs.srcSet )
-          && ( srcBinding == rhs.srcBinding )
-          && ( srcArrayElement == rhs.srcArrayElement )
-          && ( dstSet == rhs.dstSet )
-          && ( dstBinding == rhs.dstBinding )
-          && ( dstArrayElement == rhs.dstArrayElement )
-          && ( descriptorCount == rhs.descriptorCount );
-    }
-
-    bool operator!=( CopyDescriptorSet const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCopyDescriptorSet;
-
-  public:
-    const void* pNext = nullptr;
-    DescriptorSet srcSet;
-    uint32_t srcBinding;
-    uint32_t srcArrayElement;
-    DescriptorSet dstSet;
-    uint32_t dstBinding;
-    uint32_t dstArrayElement;
-    uint32_t descriptorCount;
+    ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+    ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
+    eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
+    eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM
   };
-  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
 
-  struct BufferViewCreateInfo
+  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
   {
-    BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(),
-                          Buffer buffer_ = Buffer(),
-                          Format format_ = Format::eUndefined,
-                          DeviceSize offset_ = 0,
-                          DeviceSize range_ = 0 )
-      : flags( flags_ )
-      , buffer( buffer_ )
-      , format( format_ )
-      , offset( offset_ )
-      , range( range_ )
+    switch ( value )
     {
+      case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
+      case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
+      case SubpassDescriptionFlagBits::eFragmentRegionQCOM : return "FragmentRegionQCOM";
+      case SubpassDescriptionFlagBits::eShaderResolveQCOM : return "ShaderResolveQCOM";
+      default: return "invalid";
     }
+  }
 
-    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) );
-    }
-
-    BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) );
-      return *this;
-    }
-    BufferViewCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    BufferViewCreateInfo& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    BufferViewCreateInfo& setFormat( Format format_ )
-    {
-      format = format_;
-      return *this;
-    }
-
-    BufferViewCreateInfo& setOffset( DeviceSize offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    BufferViewCreateInfo& setRange( DeviceSize range_ )
-    {
-      range = range_;
-      return *this;
-    }
-
-    operator VkBufferViewCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkBufferViewCreateInfo*>(this);
-    }
-
-    operator VkBufferViewCreateInfo &()
-    {
-      return *reinterpret_cast<VkBufferViewCreateInfo*>(this);
-    }
-
-    bool operator==( BufferViewCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( buffer == rhs.buffer )
-          && ( format == rhs.format )
-          && ( offset == rhs.offset )
-          && ( range == rhs.range );
-    }
-
-    bool operator!=( BufferViewCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBufferViewCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    BufferViewCreateFlags flags;
-    Buffer buffer;
-    Format format;
-    DeviceSize offset;
-    DeviceSize range;
-  };
-  static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
-
-  struct ShaderModuleCreateInfo
+  enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT
   {
-    ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(),
-                            size_t codeSize_ = 0,
-                            const uint32_t* pCode_ = nullptr )
-      : flags( flags_ )
-      , codeSize( codeSize_ )
-      , pCode( pCode_ )
-    {
-    }
-
-    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) );
-    }
-
-    ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) );
-      return *this;
-    }
-    ShaderModuleCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ )
-    {
-      codeSize = codeSize_;
-      return *this;
-    }
-
-    ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ )
-    {
-      pCode = pCode_;
-      return *this;
-    }
-
-    operator VkShaderModuleCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkShaderModuleCreateInfo*>(this);
-    }
-
-    operator VkShaderModuleCreateInfo &()
-    {
-      return *reinterpret_cast<VkShaderModuleCreateInfo*>(this);
-    }
-
-    bool operator==( ShaderModuleCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( codeSize == rhs.codeSize )
-          && ( pCode == rhs.pCode );
-    }
-
-    bool operator!=( ShaderModuleCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eShaderModuleCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ShaderModuleCreateFlags flags;
-    size_t codeSize;
-    const uint32_t* pCode;
+    eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
   };
-  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
 
-  struct DescriptorSetAllocateInfo
+  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
   {
-    DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(),
-                               uint32_t descriptorSetCount_ = 0,
-                               const DescriptorSetLayout* pSetLayouts_ = nullptr )
-      : descriptorPool( descriptorPool_ )
-      , descriptorSetCount( descriptorSetCount_ )
-      , pSetLayouts( pSetLayouts_ )
+    switch ( value )
     {
+      case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank";
+      default: return "invalid";
     }
+  }
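A sketch under the assumption that VK_EXT_display_control is enabled and that the surface actually reports vblank counter support; the helper name and variables are placeholders. The counter create info is passed in by the caller so the chained structure outlives this helper.

  void requestVblankCounter( vk::SwapchainCreateInfoKHR & swapchainCreateInfo,
                             vk::SwapchainCounterCreateInfoEXT & counterInfo )
  {
    // Ask for the vblank counter by chaining the counter create info into the
    // swapchain create info; the value can later be read via getSwapchainCounterEXT.
    counterInfo.surfaceCounters = vk::SurfaceCounterFlagBitsEXT::eVblank;
    swapchainCreateInfo.pNext   = &counterInfo;
  }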
 
-    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) );
-    }
-
-    DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) );
-      return *this;
-    }
-    DescriptorSetAllocateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ )
-    {
-      descriptorPool = descriptorPool_;
-      return *this;
-    }
-
-    DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ )
-    {
-      descriptorSetCount = descriptorSetCount_;
-      return *this;
-    }
-
-    DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
-    {
-      pSetLayouts = pSetLayouts_;
-      return *this;
-    }
-
-    operator VkDescriptorSetAllocateInfo const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>(this);
-    }
-
-    operator VkDescriptorSetAllocateInfo &()
-    {
-      return *reinterpret_cast<VkDescriptorSetAllocateInfo*>(this);
-    }
-
-    bool operator==( DescriptorSetAllocateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( descriptorPool == rhs.descriptorPool )
-          && ( descriptorSetCount == rhs.descriptorSetCount )
-          && ( pSetLayouts == rhs.pSetLayouts );
-    }
-
-    bool operator!=( DescriptorSetAllocateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorSetAllocateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    DescriptorPool descriptorPool;
-    uint32_t descriptorSetCount;
-    const DescriptorSetLayout* pSetLayouts;
-  };
-  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
-
-  struct PipelineVertexInputStateCreateInfo
+  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
   {
-    PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(),
-                                        uint32_t vertexBindingDescriptionCount_ = 0,
-                                        const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr,
-                                        uint32_t vertexAttributeDescriptionCount_ = 0,
-                                        const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr )
-      : flags( flags_ )
-      , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
-      , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
-      , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
-      , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
-    {
-    }
-
-    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
-    }
-
-    PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
-      return *this;
-    }
-    PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ )
-    {
-      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
-      return *this;
-    }
-
-    PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ )
-    {
-      pVertexBindingDescriptions = pVertexBindingDescriptions_;
-      return *this;
-    }
-
-    PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ )
-    {
-      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
-      return *this;
-    }
-
-    PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ )
-    {
-      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
-      return *this;
-    }
-
-    operator VkPipelineVertexInputStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineVertexInputStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
-          && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
-          && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
-          && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
-    }
-
-    bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineVertexInputStateCreateFlags flags;
-    uint32_t vertexBindingDescriptionCount;
-    const VertexInputBindingDescription* pVertexBindingDescriptions;
-    uint32_t vertexAttributeDescriptionCount;
-    const VertexInputAttributeDescription* pVertexAttributeDescriptions;
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
   };
-  static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
 
-  struct PipelineInputAssemblyStateCreateInfo
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
   {
-    PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(),
-                                          PrimitiveTopology topology_ = PrimitiveTopology::ePointList,
-                                          Bool32 primitiveRestartEnable_ = 0 )
-      : flags( flags_ )
-      , topology( topology_ )
-      , primitiveRestartEnable( primitiveRestartEnable_ )
+    switch ( value )
     {
+      case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity";
+      case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90";
+      case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180";
+      case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270";
+      case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit";
+      default: return "invalid";
     }
+  }
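A small sketch, assuming a vk::PhysicalDevice / vk::SurfaceKHR pair and the default exception-based enhanced mode: a swapchain's preTransform is commonly taken from the surface capabilities, preferring the identity transform when it is supported.

  vk::SurfaceTransformFlagBitsKHR choosePreTransform( vk::PhysicalDevice physicalDevice,
                                                      vk::SurfaceKHR     surface )
  {
    vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
    // Prefer no transform; otherwise pass the surface's current transform through
    // so the presentation engine does not have to rotate the images itself.
    return ( caps.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity )
             ? vk::SurfaceTransformFlagBitsKHR::eIdentity
             : caps.currentTransform;
  }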
 
-    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
-    }
-
-    PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
-      return *this;
-    }
-    PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ )
-    {
-      topology = topology_;
-      return *this;
-    }
-
-    PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ )
-    {
-      primitiveRestartEnable = primitiveRestartEnable_;
-      return *this;
-    }
-
-    operator VkPipelineInputAssemblyStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineInputAssemblyStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( topology == rhs.topology )
-          && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
-    }
-
-    bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineInputAssemblyStateCreateFlags flags;
-    PrimitiveTopology topology;
-    Bool32 primitiveRestartEnable;
-  };
-  static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
-
-  struct PipelineTessellationStateCreateInfo
+  enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
   {
-    PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(),
-                                         uint32_t patchControlPoints_ = 0 )
-      : flags( flags_ )
-      , patchControlPoints( patchControlPoints_ )
-    {
-    }
-
-    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
-    }
-
-    PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
-      return *this;
-    }
-    PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ )
-    {
-      patchControlPoints = patchControlPoints_;
-      return *this;
-    }
-
-    operator VkPipelineTessellationStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineTessellationStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( patchControlPoints == rhs.patchControlPoints );
-    }
-
-    bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineTessellationStateCreateFlags flags;
-    uint32_t patchControlPoints;
+    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
+    eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR
   };
-  static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
 
-  struct PipelineViewportStateCreateInfo
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
   {
-    PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(),
-                                     uint32_t viewportCount_ = 0,
-                                     const Viewport* pViewports_ = nullptr,
-                                     uint32_t scissorCount_ = 0,
-                                     const Rect2D* pScissors_ = nullptr )
-      : flags( flags_ )
-      , viewportCount( viewportCount_ )
-      , pViewports( pViewports_ )
-      , scissorCount( scissorCount_ )
-      , pScissors( pScissors_ )
+    switch ( value )
     {
+      case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+      case SwapchainCreateFlagBitsKHR::eProtected : return "Protected";
+      case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat";
+      default: return "invalid";
     }
+  }
 
-    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) );
-    }
-
-    PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) );
-      return *this;
-    }
-    PipelineViewportStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ )
-    {
-      viewportCount = viewportCount_;
-      return *this;
-    }
-
-    PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ )
-    {
-      pViewports = pViewports_;
-      return *this;
-    }
-
-    PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ )
-    {
-      scissorCount = scissorCount_;
-      return *this;
-    }
-
-    PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ )
-    {
-      pScissors = pScissors_;
-      return *this;
-    }
-
-    operator VkPipelineViewportStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineViewportStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineViewportStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( viewportCount == rhs.viewportCount )
-          && ( pViewports == rhs.pViewports )
-          && ( scissorCount == rhs.scissorCount )
-          && ( pScissors == rhs.pScissors );
-    }
-
-    bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineViewportStateCreateFlags flags;
-    uint32_t viewportCount;
-    const Viewport* pViewports;
-    uint32_t scissorCount;
-    const Rect2D* pScissors;
-  };
-  static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
-
-  struct PipelineRasterizationStateCreateInfo
+  enum class SystemAllocationScope
   {
-    PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(),
-                                          Bool32 depthClampEnable_ = 0,
-                                          Bool32 rasterizerDiscardEnable_ = 0,
-                                          PolygonMode polygonMode_ = PolygonMode::eFill,
-                                          CullModeFlags cullMode_ = CullModeFlags(),
-                                          FrontFace frontFace_ = FrontFace::eCounterClockwise,
-                                          Bool32 depthBiasEnable_ = 0,
-                                          float depthBiasConstantFactor_ = 0,
-                                          float depthBiasClamp_ = 0,
-                                          float depthBiasSlopeFactor_ = 0,
-                                          float lineWidth_ = 0 )
-      : flags( flags_ )
-      , depthClampEnable( depthClampEnable_ )
-      , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
-      , polygonMode( polygonMode_ )
-      , cullMode( cullMode_ )
-      , frontFace( frontFace_ )
-      , depthBiasEnable( depthBiasEnable_ )
-      , depthBiasConstantFactor( depthBiasConstantFactor_ )
-      , depthBiasClamp( depthBiasClamp_ )
-      , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
-      , lineWidth( lineWidth_ )
-    {
-    }
-
-    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
-    }
-
-    PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
-      return *this;
-    }
-    PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ )
-    {
-      depthClampEnable = depthClampEnable_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ )
-    {
-      rasterizerDiscardEnable = rasterizerDiscardEnable_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ )
-    {
-      polygonMode = polygonMode_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ )
-    {
-      cullMode = cullMode_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ )
-    {
-      frontFace = frontFace_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ )
-    {
-      depthBiasEnable = depthBiasEnable_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ )
-    {
-      depthBiasConstantFactor = depthBiasConstantFactor_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ )
-    {
-      depthBiasClamp = depthBiasClamp_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ )
-    {
-      depthBiasSlopeFactor = depthBiasSlopeFactor_;
-      return *this;
-    }
-
-    PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ )
-    {
-      lineWidth = lineWidth_;
-      return *this;
-    }
-
-    operator VkPipelineRasterizationStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineRasterizationStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( depthClampEnable == rhs.depthClampEnable )
-          && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
-          && ( polygonMode == rhs.polygonMode )
-          && ( cullMode == rhs.cullMode )
-          && ( frontFace == rhs.frontFace )
-          && ( depthBiasEnable == rhs.depthBiasEnable )
-          && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
-          && ( depthBiasClamp == rhs.depthBiasClamp )
-          && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
-          && ( lineWidth == rhs.lineWidth );
-    }
-
-    bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineRasterizationStateCreateFlags flags;
-    Bool32 depthClampEnable;
-    Bool32 rasterizerDiscardEnable;
-    PolygonMode polygonMode;
-    CullModeFlags cullMode;
-    FrontFace frontFace;
-    Bool32 depthBiasEnable;
-    float depthBiasConstantFactor;
-    float depthBiasClamp;
-    float depthBiasSlopeFactor;
-    float lineWidth;
+    eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
+    eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
+    eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
+    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
   };
-  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
 
-  struct PipelineDepthStencilStateCreateInfo
+  VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
   {
-    PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(),
-                                         Bool32 depthTestEnable_ = 0,
-                                         Bool32 depthWriteEnable_ = 0,
-                                         CompareOp depthCompareOp_ = CompareOp::eNever,
-                                         Bool32 depthBoundsTestEnable_ = 0,
-                                         Bool32 stencilTestEnable_ = 0,
-                                         StencilOpState front_ = StencilOpState(),
-                                         StencilOpState back_ = StencilOpState(),
-                                         float minDepthBounds_ = 0,
-                                         float maxDepthBounds_ = 0 )
-      : flags( flags_ )
-      , depthTestEnable( depthTestEnable_ )
-      , depthWriteEnable( depthWriteEnable_ )
-      , depthCompareOp( depthCompareOp_ )
-      , depthBoundsTestEnable( depthBoundsTestEnable_ )
-      , stencilTestEnable( stencilTestEnable_ )
-      , front( front_ )
-      , back( back_ )
-      , minDepthBounds( minDepthBounds_ )
-      , maxDepthBounds( maxDepthBounds_ )
+    switch ( value )
     {
+      case SystemAllocationScope::eCommand : return "Command";
+      case SystemAllocationScope::eObject : return "Object";
+      case SystemAllocationScope::eCache : return "Cache";
+      case SystemAllocationScope::eDevice : return "Device";
+      case SystemAllocationScope::eInstance : return "Instance";
+      default: return "invalid";
     }
+  }
 
-    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
-    }
-
-    PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
-      return *this;
-    }
-    PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ )
-    {
-      depthTestEnable = depthTestEnable_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ )
-    {
-      depthWriteEnable = depthWriteEnable_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ )
-    {
-      depthCompareOp = depthCompareOp_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ )
-    {
-      depthBoundsTestEnable = depthBoundsTestEnable_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ )
-    {
-      stencilTestEnable = stencilTestEnable_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ )
-    {
-      front = front_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ )
-    {
-      back = back_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ )
-    {
-      minDepthBounds = minDepthBounds_;
-      return *this;
-    }
-
-    PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ )
-    {
-      maxDepthBounds = maxDepthBounds_;
-      return *this;
-    }
-
-    operator VkPipelineDepthStencilStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineDepthStencilStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( depthTestEnable == rhs.depthTestEnable )
-          && ( depthWriteEnable == rhs.depthWriteEnable )
-          && ( depthCompareOp == rhs.depthCompareOp )
-          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
-          && ( stencilTestEnable == rhs.stencilTestEnable )
-          && ( front == rhs.front )
-          && ( back == rhs.back )
-          && ( minDepthBounds == rhs.minDepthBounds )
-          && ( maxDepthBounds == rhs.maxDepthBounds );
-    }
-
-    bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineDepthStencilStateCreateFlags flags;
-    Bool32 depthTestEnable;
-    Bool32 depthWriteEnable;
-    CompareOp depthCompareOp;
-    Bool32 depthBoundsTestEnable;
-    Bool32 stencilTestEnable;
-    StencilOpState front;
-    StencilOpState back;
-    float minDepthBounds;
-    float maxDepthBounds;
-  };
-  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
-
-  struct PipelineCacheCreateInfo
+  enum class TessellationDomainOrigin
   {
-    PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(),
-                             size_t initialDataSize_ = 0,
-                             const void* pInitialData_ = nullptr )
-      : flags( flags_ )
-      , initialDataSize( initialDataSize_ )
-      , pInitialData( pInitialData_ )
-    {
-    }
-
-    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) );
-    }
-
-    PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) );
-      return *this;
-    }
-    PipelineCacheCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ )
-    {
-      initialDataSize = initialDataSize_;
-      return *this;
-    }
-
-    PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ )
-    {
-      pInitialData = pInitialData_;
-      return *this;
-    }
-
-    operator VkPipelineCacheCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineCacheCreateInfo*>(this);
-    }
-
-    operator VkPipelineCacheCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineCacheCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineCacheCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( initialDataSize == rhs.initialDataSize )
-          && ( pInitialData == rhs.pInitialData );
-    }
-
-    bool operator!=( PipelineCacheCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineCacheCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineCacheCreateFlags flags;
-    size_t initialDataSize;
-    const void* pInitialData;
+    eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
   };
-  static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+  using TessellationDomainOriginKHR = TessellationDomainOrigin;
 
-  struct SamplerCreateInfo
+  VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
   {
-    SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(),
-                       Filter magFilter_ = Filter::eNearest,
-                       Filter minFilter_ = Filter::eNearest,
-                       SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest,
-                       SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat,
-                       SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat,
-                       SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat,
-                       float mipLodBias_ = 0,
-                       Bool32 anisotropyEnable_ = 0,
-                       float maxAnisotropy_ = 0,
-                       Bool32 compareEnable_ = 0,
-                       CompareOp compareOp_ = CompareOp::eNever,
-                       float minLod_ = 0,
-                       float maxLod_ = 0,
-                       BorderColor borderColor_ = BorderColor::eFloatTransparentBlack,
-                       Bool32 unnormalizedCoordinates_ = 0 )
-      : flags( flags_ )
-      , magFilter( magFilter_ )
-      , minFilter( minFilter_ )
-      , mipmapMode( mipmapMode_ )
-      , addressModeU( addressModeU_ )
-      , addressModeV( addressModeV_ )
-      , addressModeW( addressModeW_ )
-      , mipLodBias( mipLodBias_ )
-      , anisotropyEnable( anisotropyEnable_ )
-      , maxAnisotropy( maxAnisotropy_ )
-      , compareEnable( compareEnable_ )
-      , compareOp( compareOp_ )
-      , minLod( minLod_ )
-      , maxLod( maxLod_ )
-      , borderColor( borderColor_ )
-      , unnormalizedCoordinates( unnormalizedCoordinates_ )
+    switch ( value )
     {
+      case TessellationDomainOrigin::eUpperLeft : return "UpperLeft";
+      case TessellationDomainOrigin::eLowerLeft : return "LowerLeft";
+      default: return "invalid";
     }
+  }
 
-    SamplerCreateInfo( VkSamplerCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SamplerCreateInfo ) );
-    }
-
-    SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SamplerCreateInfo ) );
-      return *this;
-    }
-    SamplerCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setMagFilter( Filter magFilter_ )
-    {
-      magFilter = magFilter_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setMinFilter( Filter minFilter_ )
-    {
-      minFilter = minFilter_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ )
-    {
-      mipmapMode = mipmapMode_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ )
-    {
-      addressModeU = addressModeU_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ )
-    {
-      addressModeV = addressModeV_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ )
-    {
-      addressModeW = addressModeW_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setMipLodBias( float mipLodBias_ )
-    {
-      mipLodBias = mipLodBias_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ )
-    {
-      anisotropyEnable = anisotropyEnable_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ )
-    {
-      maxAnisotropy = maxAnisotropy_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ )
-    {
-      compareEnable = compareEnable_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setCompareOp( CompareOp compareOp_ )
-    {
-      compareOp = compareOp_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setMinLod( float minLod_ )
-    {
-      minLod = minLod_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setMaxLod( float maxLod_ )
-    {
-      maxLod = maxLod_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setBorderColor( BorderColor borderColor_ )
-    {
-      borderColor = borderColor_;
-      return *this;
-    }
-
-    SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ )
-    {
-      unnormalizedCoordinates = unnormalizedCoordinates_;
-      return *this;
-    }
-
-    operator VkSamplerCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkSamplerCreateInfo*>(this);
-    }
-
-    operator VkSamplerCreateInfo &()
-    {
-      return *reinterpret_cast<VkSamplerCreateInfo*>(this);
-    }
-
-    bool operator==( SamplerCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( magFilter == rhs.magFilter )
-          && ( minFilter == rhs.minFilter )
-          && ( mipmapMode == rhs.mipmapMode )
-          && ( addressModeU == rhs.addressModeU )
-          && ( addressModeV == rhs.addressModeV )
-          && ( addressModeW == rhs.addressModeW )
-          && ( mipLodBias == rhs.mipLodBias )
-          && ( anisotropyEnable == rhs.anisotropyEnable )
-          && ( maxAnisotropy == rhs.maxAnisotropy )
-          && ( compareEnable == rhs.compareEnable )
-          && ( compareOp == rhs.compareOp )
-          && ( minLod == rhs.minLod )
-          && ( maxLod == rhs.maxLod )
-          && ( borderColor == rhs.borderColor )
-          && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
-    }
-
-    bool operator!=( SamplerCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSamplerCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    SamplerCreateFlags flags;
-    Filter magFilter;
-    Filter minFilter;
-    SamplerMipmapMode mipmapMode;
-    SamplerAddressMode addressModeU;
-    SamplerAddressMode addressModeV;
-    SamplerAddressMode addressModeW;
-    float mipLodBias;
-    Bool32 anisotropyEnable;
-    float maxAnisotropy;
-    Bool32 compareEnable;
-    CompareOp compareOp;
-    float minLod;
-    float maxLod;
-    BorderColor borderColor;
-    Bool32 unnormalizedCoordinates;
-  };
-  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
-
-  struct CommandBufferAllocateInfo
+  enum class TimeDomainEXT
   {
-    CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(),
-                               CommandBufferLevel level_ = CommandBufferLevel::ePrimary,
-                               uint32_t commandBufferCount_ = 0 )
-      : commandPool( commandPool_ )
-      , level( level_ )
-      , commandBufferCount( commandBufferCount_ )
-    {
-    }
-
-    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) );
-    }
-
-    CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) );
-      return *this;
-    }
-    CommandBufferAllocateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ )
-    {
-      commandPool = commandPool_;
-      return *this;
-    }
-
-    CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ )
-    {
-      level = level_;
-      return *this;
-    }
-
-    CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
-    {
-      commandBufferCount = commandBufferCount_;
-      return *this;
-    }
-
-    operator VkCommandBufferAllocateInfo const&() const
-    {
-      return *reinterpret_cast<const VkCommandBufferAllocateInfo*>(this);
-    }
-
-    operator VkCommandBufferAllocateInfo &()
-    {
-      return *reinterpret_cast<VkCommandBufferAllocateInfo*>(this);
-    }
-
-    bool operator==( CommandBufferAllocateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( commandPool == rhs.commandPool )
-          && ( level == rhs.level )
-          && ( commandBufferCount == rhs.commandBufferCount );
-    }
-
-    bool operator!=( CommandBufferAllocateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCommandBufferAllocateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    CommandPool commandPool;
-    CommandBufferLevel level;
-    uint32_t commandBufferCount;
+    eDevice = VK_TIME_DOMAIN_DEVICE_EXT,
+    eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
+    eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
+    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
   };
-  static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
 
-  struct RenderPassBeginInfo
+  VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
   {
-    RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(),
-                         Framebuffer framebuffer_ = Framebuffer(),
-                         Rect2D renderArea_ = Rect2D(),
-                         uint32_t clearValueCount_ = 0,
-                         const ClearValue* pClearValues_ = nullptr )
-      : renderPass( renderPass_ )
-      , framebuffer( framebuffer_ )
-      , renderArea( renderArea_ )
-      , clearValueCount( clearValueCount_ )
-      , pClearValues( pClearValues_ )
+    switch ( value )
     {
+      case TimeDomainEXT::eDevice : return "Device";
+      case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic";
+      case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw";
+      case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter";
+      default: return "invalid";
     }
+  }
 
-    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) );
-    }
-
-    RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) );
-      return *this;
-    }
-    RenderPassBeginInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ )
-    {
-      renderPass = renderPass_;
-      return *this;
-    }
-
-    RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ )
-    {
-      framebuffer = framebuffer_;
-      return *this;
-    }
-
-    RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ )
-    {
-      renderArea = renderArea_;
-      return *this;
-    }
-
-    RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ )
-    {
-      clearValueCount = clearValueCount_;
-      return *this;
-    }
-
-    RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ )
-    {
-      pClearValues = pClearValues_;
-      return *this;
-    }
-
-    operator VkRenderPassBeginInfo const&() const
-    {
-      return *reinterpret_cast<const VkRenderPassBeginInfo*>(this);
-    }
-
-    operator VkRenderPassBeginInfo &()
-    {
-      return *reinterpret_cast<VkRenderPassBeginInfo*>(this);
-    }
-
-    bool operator==( RenderPassBeginInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( renderPass == rhs.renderPass )
-          && ( framebuffer == rhs.framebuffer )
-          && ( renderArea == rhs.renderArea )
-          && ( clearValueCount == rhs.clearValueCount )
-          && ( pClearValues == rhs.pClearValues );
-    }
-
-    bool operator!=( RenderPassBeginInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eRenderPassBeginInfo;
-
-  public:
-    const void* pNext = nullptr;
-    RenderPass renderPass;
-    Framebuffer framebuffer;
-    Rect2D renderArea;
-    uint32_t clearValueCount;
-    const ClearValue* pClearValues;
-  };
-  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
-
-  struct EventCreateInfo
+  enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT
   {
-    EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() )
-      : flags( flags_ )
-    {
-    }
-
-    EventCreateInfo( VkEventCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( EventCreateInfo ) );
-    }
-
-    EventCreateInfo& operator=( VkEventCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( EventCreateInfo ) );
-      return *this;
-    }
-    EventCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    EventCreateInfo& setFlags( EventCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    operator VkEventCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkEventCreateInfo*>(this);
-    }
-
-    operator VkEventCreateInfo &()
-    {
-      return *reinterpret_cast<VkEventCreateInfo*>(this);
-    }
-
-    bool operator==( EventCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( EventCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eEventCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    EventCreateFlags flags;
+    eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT,
+    eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT,
+    eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT,
+    eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT,
+    eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT,
+    eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
+    eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
   };
-  static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
 
-  struct SemaphoreCreateInfo
+  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value )
   {
-    SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() )
-      : flags( flags_ )
+    switch ( value )
     {
+      case ToolPurposeFlagBitsEXT::eValidation : return "Validation";
+      case ToolPurposeFlagBitsEXT::eProfiling : return "Profiling";
+      case ToolPurposeFlagBitsEXT::eTracing : return "Tracing";
+      case ToolPurposeFlagBitsEXT::eAdditionalFeatures : return "AdditionalFeatures";
+      case ToolPurposeFlagBitsEXT::eModifyingFeatures : return "ModifyingFeatures";
+      case ToolPurposeFlagBitsEXT::eDebugReporting : return "DebugReporting";
+      case ToolPurposeFlagBitsEXT::eDebugMarkers : return "DebugMarkers";
+      default: return "invalid";
     }
+  }
 
-    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) );
-    }
-
-    SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) );
-      return *this;
-    }
-    SemaphoreCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    operator VkSemaphoreCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkSemaphoreCreateInfo*>(this);
-    }
-
-    operator VkSemaphoreCreateInfo &()
-    {
-      return *reinterpret_cast<VkSemaphoreCreateInfo*>(this);
-    }
-
-    bool operator==( SemaphoreCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( SemaphoreCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSemaphoreCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    SemaphoreCreateFlags flags;
-  };
-  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
-
-  struct FramebufferCreateInfo
+  enum class ValidationCacheHeaderVersionEXT
   {
-    FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(),
-                           RenderPass renderPass_ = RenderPass(),
-                           uint32_t attachmentCount_ = 0,
-                           const ImageView* pAttachments_ = nullptr,
-                           uint32_t width_ = 0,
-                           uint32_t height_ = 0,
-                           uint32_t layers_ = 0 )
-      : flags( flags_ )
-      , renderPass( renderPass_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
-      , width( width_ )
-      , height( height_ )
-      , layers( layers_ )
-    {
-    }
-
-    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) );
-    }
-
-    FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) );
-      return *this;
-    }
-    FramebufferCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ )
-    {
-      renderPass = renderPass_;
-      return *this;
-    }
-
-    FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
-    {
-      attachmentCount = attachmentCount_;
-      return *this;
-    }
-
-    FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ )
-    {
-      pAttachments = pAttachments_;
-      return *this;
-    }
-
-    FramebufferCreateInfo& setWidth( uint32_t width_ )
-    {
-      width = width_;
-      return *this;
-    }
-
-    FramebufferCreateInfo& setHeight( uint32_t height_ )
-    {
-      height = height_;
-      return *this;
-    }
-
-    FramebufferCreateInfo& setLayers( uint32_t layers_ )
-    {
-      layers = layers_;
-      return *this;
-    }
-
-    operator VkFramebufferCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkFramebufferCreateInfo*>(this);
-    }
-
-    operator VkFramebufferCreateInfo &()
-    {
-      return *reinterpret_cast<VkFramebufferCreateInfo*>(this);
-    }
-
-    bool operator==( FramebufferCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( renderPass == rhs.renderPass )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( width == rhs.width )
-          && ( height == rhs.height )
-          && ( layers == rhs.layers );
-    }
-
-    bool operator!=( FramebufferCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eFramebufferCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    FramebufferCreateFlags flags;
-    RenderPass renderPass;
-    uint32_t attachmentCount;
-    const ImageView* pAttachments;
-    uint32_t width;
-    uint32_t height;
-    uint32_t layers;
+    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
   };
-  static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
 
-  struct DisplayModeCreateInfoKHR
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
   {
-    DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(),
-                              DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() )
-      : flags( flags_ )
-      , parameters( parameters_ )
+    switch ( value )
     {
+      case ValidationCacheHeaderVersionEXT::eOne : return "One";
+      default: return "invalid";
     }
+  }
 
-    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) );
-    }
-
-    DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) );
-      return *this;
-    }
-    DisplayModeCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ )
-    {
-      parameters = parameters_;
-      return *this;
-    }
-
-    operator VkDisplayModeCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>(this);
-    }
-
-    operator VkDisplayModeCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>(this);
-    }
-
-    bool operator==( DisplayModeCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( parameters == rhs.parameters );
-    }
-
-    bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    DisplayModeCreateFlagsKHR flags;
-    DisplayModeParametersKHR parameters;
-  };
-  static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
-
-  struct DisplayPresentInfoKHR
+  enum class ValidationCheckEXT
   {
-    DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(),
-                           Rect2D dstRect_ = Rect2D(),
-                           Bool32 persistent_ = 0 )
-      : srcRect( srcRect_ )
-      , dstRect( dstRect_ )
-      , persistent( persistent_ )
-    {
-    }
-
-    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) );
-    }
-
-    DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) );
-      return *this;
-    }
-    DisplayPresentInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ )
-    {
-      srcRect = srcRect_;
-      return *this;
-    }
-
-    DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ )
-    {
-      dstRect = dstRect_;
-      return *this;
-    }
-
-    DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ )
-    {
-      persistent = persistent_;
-      return *this;
-    }
-
-    operator VkDisplayPresentInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayPresentInfoKHR*>(this);
-    }
-
-    operator VkDisplayPresentInfoKHR &()
-    {
-      return *reinterpret_cast<VkDisplayPresentInfoKHR*>(this);
-    }
-
-    bool operator==( DisplayPresentInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcRect == rhs.srcRect )
-          && ( dstRect == rhs.dstRect )
-          && ( persistent == rhs.persistent );
-    }
-
-    bool operator!=( DisplayPresentInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayPresentInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Rect2D srcRect;
-    Rect2D dstRect;
-    Bool32 persistent;
+    eAll = VK_VALIDATION_CHECK_ALL_EXT,
+    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
   };
-  static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationCheckEXT::eAll : return "All";
+      case ValidationCheckEXT::eShaders : return "Shaders";
+      default: return "invalid";
+    }
+  }
+
+  enum class ValidationFeatureDisableEXT
+  {
+    eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
+    eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
+    eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
+    eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
+    eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
+    eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
+    eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureDisableEXT::eAll : return "All";
+      case ValidationFeatureDisableEXT::eShaders : return "Shaders";
+      case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety";
+      case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters";
+      case ValidationFeatureDisableEXT::eObjectLifetimes : return "ObjectLifetimes";
+      case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks";
+      case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles";
+      default: return "invalid";
+    }
+  }
+
+  enum class ValidationFeatureEnableEXT
+  {
+    eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
+    eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
+    eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
+    eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
+    eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted";
+      case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot";
+      case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices";
+      case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf";
+      case ValidationFeatureEnableEXT::eSynchronizationValidation : return "SynchronizationValidation";
+      default: return "invalid";
+    }
+  }
+
+  enum class VendorId
+  {
+    eVIV = VK_VENDOR_ID_VIV,
+    eVSI = VK_VENDOR_ID_VSI,
+    eKazan = VK_VENDOR_ID_KAZAN,
+    eCodeplay = VK_VENDOR_ID_CODEPLAY,
+    eMESA = VK_VENDOR_ID_MESA
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VendorId value )
+  {
+    switch ( value )
+    {
+      case VendorId::eVIV : return "VIV";
+      case VendorId::eVSI : return "VSI";
+      case VendorId::eKazan : return "Kazan";
+      case VendorId::eCodeplay : return "Codeplay";
+      case VendorId::eMESA : return "MESA";
+      default: return "invalid";
+    }
+  }
+
+  enum class VertexInputRate
+  {
+    eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
+    eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VertexInputRate value )
+  {
+    switch ( value )
+    {
+      case VertexInputRate::eVertex : return "Vertex";
+      case VertexInputRate::eInstance : return "Instance";
+      default: return "invalid";
+    }
+  }
+
+  enum class ViewportCoordinateSwizzleNV
+  {
+    ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+    eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
+    ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+    eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
+    ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
+    eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
+    ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
+    eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
+  {
+    switch ( value )
+    {
+      case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX";
+      case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX";
+      case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY";
+      case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY";
+      case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ";
+      case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ";
+      case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW";
+      case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW";
+      default: return "invalid";
+    }
+  }
+
+  template<typename T>
+  struct IndexTypeValue
+  {};
+
+  template <>
+  struct IndexTypeValue<uint16_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint16>
+  {
+    using Type = uint16_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint32_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint32>
+  {
+    using Type = uint32_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint8_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint8EXT>
+  {
+    using Type = uint8_t;
+  };
+
+
+  using AccessFlags = Flags<AccessFlagBits>;
+
+  template <> struct FlagTraits<AccessFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eAccelerationStructureReadKHR) | VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) | VkFlags(AccessFlagBits::eCommandPreprocessReadNV) | VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( AccessFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & AccessFlagBits::eIndirectCommandRead ) result += "IndirectCommandRead | ";
+    if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | ";
+    if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | ";
+    if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | ";
+    if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | ";
+    if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | ";
+    if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | ";
+    if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | ";
+    if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | ";
+    if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | ";
+    if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | ";
+    if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | ";
+    if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | ";
+    if ( value & AccessFlagBits::eHostRead ) result += "HostRead | ";
+    if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | ";
+    if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | ";
+    if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | ";
+    if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | ";
+    if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | ";
+    if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | ";
+    if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | ";
+    if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | ";
+    if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | ";
+    if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | ";
+    if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | ";
+    if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | ";
+    if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | ";
+    if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR  )
+  {
+
+    return "{}";
+  }
 
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
-  struct AndroidSurfaceCreateInfoKHR
+  enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
   {
-    AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(),
-                                 struct ANativeWindow* window_ = nullptr )
-      : flags( flags_ )
-      , window( window_ )
-    {
-    }
+    return "(void)";
+  }
 
-    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
-    }
+  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
 
-    AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
-      return *this;
-    }
-    AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR  )
+  {
 
-    AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    AndroidSurfaceCreateInfoKHR& setWindow( struct ANativeWindow* window_ )
-    {
-      window = window_;
-      return *this;
-    }
-
-    operator VkAndroidSurfaceCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>(this);
-    }
-
-    operator VkAndroidSurfaceCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>(this);
-    }
-
-    bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( window == rhs.window );
-    }
-
-    bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    AndroidSurfaceCreateFlagsKHR flags;
-    struct ANativeWindow* window;
-  };
-  static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+    return "{}";
+  }
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
 
-#ifdef VK_USE_PLATFORM_VI_NN
-  struct ViSurfaceCreateInfoNN
+
+  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
+
+  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
   {
-    ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = ViSurfaceCreateFlagsNN(),
-                           void* window_ = nullptr )
-      : flags( flags_ )
-      , window( window_ )
+    enum : VkFlags
     {
-    }
-
-    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) );
-    }
-
-    ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) );
-      return *this;
-    }
-    ViSurfaceCreateInfoNN& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ViSurfaceCreateInfoNN& setFlags( ViSurfaceCreateFlagsNN flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ViSurfaceCreateInfoNN& setWindow( void* window_ )
-    {
-      window = window_;
-      return *this;
-    }
-
-    operator VkViSurfaceCreateInfoNN const&() const
-    {
-      return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>(this);
-    }
-
-    operator VkViSurfaceCreateInfoNN &()
-    {
-      return *reinterpret_cast<VkViSurfaceCreateInfoNN*>(this);
-    }
-
-    bool operator==( ViSurfaceCreateInfoNN const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( window == rhs.window );
-    }
-
-    bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eViSurfaceCreateInfoNN;
-
-  public:
-    const void* pNext = nullptr;
-    ViSurfaceCreateFlagsNN flags;
-    void* window;
+      allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
+    };
   };
-  static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AttachmentDescriptionFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AttachmentDescriptionFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AttachmentDescriptionFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( AttachmentDescriptionFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using BufferCreateFlags = Flags<BufferCreateFlagBits>;
+
+  template <> struct FlagTraits<BufferCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplay)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( BufferCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
+    if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
+    if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | ";
+    if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using BufferUsageFlags = Flags<BufferUsageFlagBits>;
+
+  template <> struct FlagTraits<BufferUsageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( BufferUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | ";
+    if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | ";
+    if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) result += "ShaderDeviceAddress | ";
+    if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
+    if ( value & BufferUsageFlagBits::eRayTracingKHR ) result += "RayTracingKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class BufferViewCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
+
+  template <> struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eLowMemory)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( BuildAccelerationStructureFlagsKHR( bits ) );
+  }
+
+  using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | ";
+    if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ColorComponentFlags = Flags<ColorComponentFlagBits>;
+
+  template <> struct FlagTraits<ColorComponentFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ColorComponentFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ColorComponentFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ColorComponentFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ColorComponentFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ColorComponentFlagBits::eR ) result += "R | ";
+    if ( value & ColorComponentFlagBits::eG ) result += "G | ";
+    if ( value & ColorComponentFlagBits::eB ) result += "B | ";
+    if ( value & ColorComponentFlagBits::eA ) result += "A | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
+
+  template <> struct FlagTraits<CommandBufferResetFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferResetFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferResetFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferResetFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandBufferResetFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;
+
+  template <> struct FlagTraits<CommandBufferUsageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandBufferUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | ";
+    if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | ";
+    if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;
+
+  template <> struct FlagTraits<CommandPoolCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandPoolCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | ";
+    if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | ";
+    if ( value & CommandPoolCreateFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;
+
+  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandPoolResetFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class CommandPoolTrimFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
+  {
+    return "(void)";
+  }
+
+  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;
+
+  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
+
+  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CompositeAlphaFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
+
+  template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ConditionalRenderingFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using CullModeFlags = Flags<CullModeFlagBits>;
+
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CullModeFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CullModeFlagBits::eFront ) result += "Front | ";
+    if ( value & CullModeFlagBits::eBack ) result += "Back | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
+
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugReportFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | ";
+    if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | ";
+    if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | ";
+    if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
+
+  template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
+
+  template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+  enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+
+  using DependencyFlags = Flags<DependencyFlagBits>;
+
+  template <> struct FlagTraits<DependencyFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DependencyFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DependencyFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | ";
+    if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | ";
+    if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;
+
+  template <> struct FlagTraits<DescriptorBindingFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DescriptorBindingFlagBits::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBits::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBits::ePartiallyBound) | VkFlags(DescriptorBindingFlagBits::eVariableDescriptorCount)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorBindingFlags( bits ) );
+  }
+
+  using DescriptorBindingFlagsEXT = DescriptorBindingFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
+    if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
+    if ( value & DescriptorBindingFlagBits::ePartiallyBound ) result += "PartiallyBound | ";
+    if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
+
+  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBind)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorPoolCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | ";
+    if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorPoolResetFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
+  {
+    return "(void)";
+  }
+
+  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
+
+  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool) | VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorSetLayoutCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | ";
+    if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;
+
+  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
+
+  template <> struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceDiagnosticsConfigFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | ";
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | ";
+    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
+
+  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DeviceMemoryReportFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DeviceMemoryReportFlagsEXT = Flags<DeviceMemoryReportFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
+
+  template <> struct FlagTraits<DeviceQueueCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceQueueCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+  enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  enum class DisplayModeCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR  )
+  {
+
+    return "{}";
+  }
+
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
+
+  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR  )
+  {
+
+    return "{}";
+  }
+
+  enum class EventCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
+
+  template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalFenceFeatureFlags( bits ) );
+  }
+
+  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
+
+  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalFenceHandleTypeFlags( bits ) );
+  }
+
+  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryFeatureFlags( bits ) );
+  }
+
+  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryHandleTypeFlags( bits ) );
+  }
+
+  using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
+
+  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalSemaphoreFeatureFlags( bits ) );
+  }
+
+  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
+
+  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
+  }
+
+  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using FenceCreateFlags = Flags<FenceCreateFlagBits>;
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FenceCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using FenceImportFlags = Flags<FenceImportFlagBits>;
+
+  template <> struct FlagTraits<FenceImportFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(FenceImportFlagBits::eTemporary)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FenceImportFlags( bits ) );
+  }
+
+  using FenceImportFlagsKHR = FenceImportFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
+
+  template <> struct FlagTraits<FormatFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) | VkFlags(FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FormatFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FormatFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FormatFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FormatFeatureFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FormatFeatureFlagBits::eSampledImage ) result += "SampledImage | ";
+    if ( value & FormatFeatureFlagBits::eStorageImage ) result += "StorageImage | ";
+    if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) result += "StorageImageAtomic | ";
+    if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | ";
+    if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
+    if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | ";
+    if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | ";
+    if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
+    if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | ";
+    if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | ";
+    if ( value & FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
+    if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | ";
+    if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | ";
+    if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | ";
+    if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
+    if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
+
+  template <> struct FlagTraits<FramebufferCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(FramebufferCreateFlagBits::eImageless)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FramebufferCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FramebufferCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FramebufferCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FramebufferCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FramebufferCreateFlagBits::eImageless ) result += "Imageless | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
+
+  template <> struct FlagTraits<GeometryFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(GeometryFlagBitsKHR::eOpaque) | VkFlags(GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( GeometryFlagsKHR( bits ) );
+  }
+
+  using GeometryFlagsNV = GeometryFlagsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
+
+  template <> struct FlagTraits<GeometryInstanceFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable) | VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsKHR::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsKHR::eForceNoOpaque)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( GeometryInstanceFlagsKHR( bits ) );
+  }
+
+  using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | ";
+    if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  enum class IOSSurfaceCreateFlagBitsMVK : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
+  {
+    return "(void)";
+  }
+
+  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK  )
+  {
+
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+
+  using ImageAspectFlags = Flags<ImageAspectFlagBits>;
+
+  template <> struct FlagTraits<ImageAspectFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageAspectFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageAspectFlagBits::eColor ) result += "Color | ";
+    if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | ";
+    if ( value & ImageAspectFlagBits::eStencil ) result += "Stencil | ";
+    if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | ";
+    if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | ";
+    if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | ";
+    if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ImageCreateFlags = Flags<ImageCreateFlagBits>;
+
+  template <> struct FlagTraits<ImageCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) | VkFlags(ImageCreateFlagBits::eSubsampledEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
+    if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
+    if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | ";
+    if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | ";
+    if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | ";
+    if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
+    if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | ";
+    if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | ";
+    if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | ";
+    if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | ";
+    if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | ";
+    if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | ";
+    if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | ";
+    if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
+  {
+    return "(void)";
+  }
+
+  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
+
+  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA  )
+  {
+
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+
+  using ImageUsageFlags = Flags<ImageUsageFlagBits>;
+
+  template <> struct FlagTraits<ImageUsageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | ";
+    if ( value & ImageUsageFlagBits::eStorage ) result += "Storage | ";
+    if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | ";
+    if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
+    if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | ";
+    if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | ";
+    if ( value & ImageUsageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | ";
+    if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
+
+  template <> struct FlagTraits<ImageViewCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) | VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageViewCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageViewCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageViewCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageViewCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | ";
+    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) result += "FragmentDensityMapDeferredEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;
+
+  template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;
+
+  template <> struct FlagTraits<IndirectStateFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(IndirectStateFlagBitsNV::eFlagFrontface)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectStateFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectStateFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectStateFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( IndirectStateFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
+  {
+    return "(void)";
+  }
+
+  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
+
+  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK  )
+  {
+
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+
+  using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;
+
+  template <> struct FlagTraits<MemoryAllocateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) | VkFlags(MemoryAllocateFlagBits::eDeviceAddress) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplay)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryAllocateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryAllocateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryAllocateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryAllocateFlags( bits ) );
+  }
+
+  using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | ";
+    if ( value & MemoryAllocateFlagBits::eDeviceAddress ) result += "DeviceAddress | ";
+    if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
+
+  template <> struct FlagTraits<MemoryHeapFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryHeapFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
+    if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class MemoryMapFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
+  {
+    return "(void)";
+  }
+
+  using MemoryMapFlags = Flags<MemoryMapFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
+
+  template <> struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryPropertyFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
+    if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | ";
+    if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | ";
+    if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | ";
+    if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | ";
+    if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += "DeviceCoherentAMD | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+  enum class MetalSurfaceCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+
+  using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;
+
+  template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PeerMemoryFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PeerMemoryFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PeerMemoryFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PeerMemoryFeatureFlags( bits ) );
+  }
+
+  using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | ";
+    if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | ";
+    if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | ";
+    if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;
+
+  template <> struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) | VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PerformanceCounterDescriptionFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) result += "PerformanceImpacting | ";
+    if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) result += "ConcurrentlyImpacted | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
+
+  template <> struct FlagTraits<PipelineCacheCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(PipelineCacheCreateFlagBits::eExternallySynchronizedEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCacheCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCacheCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCacheCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineCacheCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) result += "ExternallySynchronizedEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PipelineColorBlendStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV  )
+  {
+
+    return "{}";
+  }
+
+
+  using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;
+
+  template <> struct FlagTraits<PipelineCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipAabbsKHR) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) | VkFlags(PipelineCreateFlagBits::eIndirectBindableNV) | VkFlags(PipelineCreateFlagBits::eLibraryKHR) | VkFlags(PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT) | VkFlags(PipelineCreateFlagBits::eEarlyReturnOnFailureEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | ";
+    if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | ";
+    if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | ";
+    if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | ";
+    if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | ";
+    if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | ";
+    if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | ";
+    if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | ";
+    if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | ";
+    if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | ";
+    if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | ";
+    if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) result += "FailOnPipelineCompileRequiredEXT | ";
+    if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) result += "EarlyReturnOnFailureEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using PipelineCreationFeedbackFlagsEXT = Flags<PipelineCreationFeedbackFlagBitsEXT>;
+
+  template <> struct FlagTraits<PipelineCreationFeedbackFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineCreationFeedbackFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) result += "Valid | ";
+    if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | ";
+    if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PipelineDepthStencilStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineDynamicStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineLayoutCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineMultisampleStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineRasterizationStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+
+  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;
+
+  template <> struct FlagTraits<PipelineShaderStageCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineShaderStageCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineShaderStageCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineShaderStageCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineShaderStageCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) result += "AllowVaryingSubgroupSizeEXT | ";
+    if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) result += "RequireFullSubgroupsEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using PipelineStageFlags = Flags<PipelineStageFlagBits>;
+
+  template <> struct FlagTraits<PipelineStageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eRayTracingShaderKHR) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildKHR) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) | VkFlags(PipelineStageFlagBits::eCommandPreprocessNV)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineStageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | ";
+    if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | ";
+    if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | ";
+    if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | ";
+    if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | ";
+    if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | ";
+    if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | ";
+    if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | ";
+    if ( value & PipelineStageFlagBits::eTransfer ) result += "Transfer | ";
+    if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | ";
+    if ( value & PipelineStageFlagBits::eHost ) result += "Host | ";
+    if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | ";
+    if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | ";
+    if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | ";
+    if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
+    if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | ";
+    if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | ";
+    if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | ";
+    if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | ";
+    if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | ";
+    if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | ";
+    if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PipelineTessellationStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineVertexInputStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineViewportStateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+  enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV  )
+  {
+
+    return "{}";
+  }
+
+
+  using PrivateDataSlotCreateFlagsEXT = Flags<PrivateDataSlotCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+
+  using QueryControlFlags = Flags<QueryControlFlagBits>;
+
+  template <> struct FlagTraits<QueryControlFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(QueryControlFlagBits::ePrecise)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryControlFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryControlFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryControlFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryControlFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
+
+  template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryPipelineStatisticFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | ";
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) result += "VertexShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using QueryResultFlags = Flags<QueryResultFlagBits>;
+
+  template <> struct FlagTraits<QueryResultFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryResultFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueryResultFlagBits::e64 ) result += "64 | ";
+    if ( value & QueryResultFlagBits::eWait ) result += "Wait | ";
+    if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | ";
+    if ( value & QueryResultFlagBits::ePartial ) result += "Partial | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using QueueFlags = Flags<QueueFlagBits>;
+
+  template <> struct FlagTraits<QueueFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueueFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueueFlagBits::eGraphics ) result += "Graphics | ";
+    if ( value & QueueFlagBits::eCompute ) result += "Compute | ";
+    if ( value & QueueFlagBits::eTransfer ) result += "Transfer | ";
+    if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & QueueFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;
+
+  template <> struct FlagTraits<RenderPassCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(RenderPassCreateFlagBits::eTransformQCOM)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return RenderPassCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return RenderPassCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return RenderPassCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( RenderPassCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & RenderPassCreateFlagBits::eTransformQCOM ) result += "TransformQCOM | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ResolveModeFlags = Flags<ResolveModeFlagBits>;
+
+  template <> struct FlagTraits<ResolveModeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ResolveModeFlagBits::eNone) | VkFlags(ResolveModeFlagBits::eSampleZero) | VkFlags(ResolveModeFlagBits::eAverage) | VkFlags(ResolveModeFlagBits::eMin) | VkFlags(ResolveModeFlagBits::eMax)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ResolveModeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ResolveModeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ResolveModeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ResolveModeFlags( bits ) );
+  }
+
+  using ResolveModeFlagsKHR = ResolveModeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ResolveModeFlagBits::eSampleZero ) result += "SampleZero | ";
+    if ( value & ResolveModeFlagBits::eAverage ) result += "Average | ";
+    if ( value & ResolveModeFlagBits::eMin ) result += "Min | ";
+    if ( value & ResolveModeFlagBits::eMax ) result += "Max | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SampleCountFlags = Flags<SampleCountFlagBits>;
+
+  template <> struct FlagTraits<SampleCountFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SampleCountFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SampleCountFlagBits::e1 ) result += "1 | ";
+    if ( value & SampleCountFlagBits::e2 ) result += "2 | ";
+    if ( value & SampleCountFlagBits::e4 ) result += "4 | ";
+    if ( value & SampleCountFlagBits::e8 ) result += "8 | ";
+    if ( value & SampleCountFlagBits::e16 ) result += "16 | ";
+    if ( value & SampleCountFlagBits::e32 ) result += "32 | ";
+    if ( value & SampleCountFlagBits::e64 ) result += "64 | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
+
+  template <> struct FlagTraits<SamplerCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SamplerCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SamplerCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SamplerCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SamplerCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
+    if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
+
+  template <> struct FlagTraits<SemaphoreImportFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreImportFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreImportFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreImportFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SemaphoreImportFlags( bits ) );
+  }
+
+  using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;
+
+  template <> struct FlagTraits<SemaphoreWaitFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SemaphoreWaitFlagBits::eAny)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreWaitFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreWaitFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreWaitFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SemaphoreWaitFlags( bits ) );
+  }
+
+  using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SemaphoreWaitFlagBits::eAny ) result += "Any | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD  )
+  {
+
+    return "{}";
+  }
+
+
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags  )
+  {
+
+    return "{}";
+  }
+
+
+  using ShaderStageFlags = Flags<ShaderStageFlagBits>;
+
+  template <> struct FlagTraits<ShaderStageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenKHR) | VkFlags(ShaderStageFlagBits::eAnyHitKHR) | VkFlags(ShaderStageFlagBits::eClosestHitKHR) | VkFlags(ShaderStageFlagBits::eMissKHR) | VkFlags(ShaderStageFlagBits::eIntersectionKHR) | VkFlags(ShaderStageFlagBits::eCallableKHR) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ShaderStageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ShaderStageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ShaderStageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ShaderStageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | ";
+    if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | ";
+    if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | ";
+    if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | ";
+    if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | ";
+    if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | ";
+    if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | ";
+    if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | ";
+    if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | ";
+    if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | ";
+    if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | ";
+    if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | ";
+    if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | ";
+    if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
+
+  template <> struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SparseImageFormatFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | ";
+    if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | ";
+    if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
+
+  template <> struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SparseMemoryBindFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using StencilFaceFlags = Flags<StencilFaceFlagBits>;
+
+  template <> struct FlagTraits<StencilFaceFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return StencilFaceFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return StencilFaceFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return StencilFaceFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( StencilFaceFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & StencilFaceFlagBits::eFront ) result += "Front | ";
+    if ( value & StencilFaceFlagBits::eBack ) result += "Back | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+#ifdef VK_USE_PLATFORM_GGP
+  enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
+  {
+    return "(void)";
+  }
+
+  using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
+
+  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP  )
+  {
+
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+
+  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
+
+  template <> struct FlagTraits<SubgroupFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubgroupFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubgroupFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubgroupFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SubgroupFeatureFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | ";
+    if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | ";
+    if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | ";
+    if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | ";
+    if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | ";
+    if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | ";
+    if ( value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | ";
+    if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | ";
+    if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
+
+  template <> struct FlagTraits<SubpassDescriptionFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) | VkFlags(SubpassDescriptionFlagBits::eFragmentRegionQCOM) | VkFlags(SubpassDescriptionFlagBits::eShaderResolveQCOM)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubpassDescriptionFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubpassDescriptionFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubpassDescriptionFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SubpassDescriptionFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | ";
+    if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | ";
+    if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) result += "FragmentRegionQCOM | ";
+    if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) result += "ShaderResolveQCOM | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;
+
+  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SurfaceCounterFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
+
+  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SurfaceTransformFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += "HorizontalMirrorRotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
+
+  template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SwapchainCreateFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+
+  using ToolPurposeFlagsEXT = Flags<ToolPurposeFlagBitsEXT>;
+
+  template <> struct FlagTraits<ToolPurposeFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) | VkFlags(ToolPurposeFlagBitsEXT::eProfiling) | VkFlags(ToolPurposeFlagBitsEXT::eTracing) | VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) | VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ToolPurposeFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ToolPurposeFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ToolPurposeFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ToolPurposeFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value  )
+  {
+
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ToolPurposeFlagBitsEXT::eValidation ) result += "Validation | ";
+    if ( value & ToolPurposeFlagBitsEXT::eProfiling ) result += "Profiling | ";
+    if ( value & ToolPurposeFlagBitsEXT::eTracing ) result += "Tracing | ";
+    if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) result += "AdditionalFeatures | ";
+    if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) result += "ModifyingFeatures | ";
+    if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) result += "DebugReporting | ";
+    if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) result += "DebugMarkers | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ValidationCacheCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT  )
+  {
+
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  enum class ViSurfaceCreateFlagBitsNN : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
+  {
+    return "(void)";
+  }
+
+  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
+
+  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN  )
+  {
+
+    return "{}";
+  }
 #endif /*VK_USE_PLATFORM_VI_NN*/
 
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  struct WaylandSurfaceCreateInfoKHR
+  enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
   {
-    WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(),
-                                 struct wl_display* display_ = nullptr,
-                                 struct wl_surface* surface_ = nullptr )
-      : flags( flags_ )
-      , display( display_ )
-      , surface( surface_ )
-    {
-    }
+    return "(void)";
+  }
 
-    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
-    }
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
 
-    WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
-      return *this;
-    }
-    WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR  )
+  {
 
-    WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ )
-    {
-      display = display_;
-      return *this;
-    }
-
-    WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ )
-    {
-      surface = surface_;
-      return *this;
-    }
-
-    operator VkWaylandSurfaceCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>(this);
-    }
-
-    operator VkWaylandSurfaceCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>(this);
-    }
-
-    bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( display == rhs.display )
-          && ( surface == rhs.surface );
-    }
-
-    bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    WaylandSurfaceCreateFlagsKHR flags;
-    struct wl_display* display;
-    struct wl_surface* surface;
-  };
-  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+    return "{}";
+  }
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
 
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct Win32SurfaceCreateInfoKHR
+  enum class Win32SurfaceCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
   {
-    Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(),
-                               HINSTANCE hinstance_ = 0,
-                               HWND hwnd_ = 0 )
-      : flags( flags_ )
-      , hinstance( hinstance_ )
-      , hwnd( hwnd_ )
-    {
-    }
+    return "(void)";
+  }
 
-    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
-    }
+  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
 
-    Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
-      return *this;
-    }
-    Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR  )
+  {
 
-    Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ )
-    {
-      hinstance = hinstance_;
-      return *this;
-    }
-
-    Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ )
-    {
-      hwnd = hwnd_;
-      return *this;
-    }
-
-    operator VkWin32SurfaceCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>(this);
-    }
-
-    operator VkWin32SurfaceCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>(this);
-    }
-
-    bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( hinstance == rhs.hinstance )
-          && ( hwnd == rhs.hwnd );
-    }
-
-    bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Win32SurfaceCreateFlagsKHR flags;
-    HINSTANCE hinstance;
-    HWND hwnd;
-  };
-  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+    return "{}";
+  }
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  struct XlibSurfaceCreateInfoKHR
-  {
-    XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(),
-                              Display* dpy_ = nullptr,
-                              Window window_ = 0 )
-      : flags( flags_ )
-      , dpy( dpy_ )
-      , window( window_ )
-    {
-    }
-
-    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
-    }
-
-    XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
-      return *this;
-    }
-    XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ )
-    {
-      dpy = dpy_;
-      return *this;
-    }
-
-    XlibSurfaceCreateInfoKHR& setWindow( Window window_ )
-    {
-      window = window_;
-      return *this;
-    }
-
-    operator VkXlibSurfaceCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>(this);
-    }
-
-    operator VkXlibSurfaceCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>(this);
-    }
-
-    bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( dpy == rhs.dpy )
-          && ( window == rhs.window );
-    }
-
-    bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    XlibSurfaceCreateFlagsKHR flags;
-    Display* dpy;
-    Window window;
-  };
-  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
 
 #ifdef VK_USE_PLATFORM_XCB_KHR
-  struct XcbSurfaceCreateInfoKHR
+  enum class XcbSurfaceCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
   {
-    XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(),
-                             xcb_connection_t* connection_ = nullptr,
-                             xcb_window_t window_ = 0 )
-      : flags( flags_ )
-      , connection( connection_ )
-      , window( window_ )
-    {
-    }
+    return "(void)";
+  }
 
-    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
-    }
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
 
-    XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
-      return *this;
-    }
-    XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR  )
+  {
 
-    XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ )
-    {
-      connection = connection_;
-      return *this;
-    }
-
-    XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ )
-    {
-      window = window_;
-      return *this;
-    }
-
-    operator VkXcbSurfaceCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>(this);
-    }
-
-    operator VkXcbSurfaceCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>(this);
-    }
-
-    bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( connection == rhs.connection )
-          && ( window == rhs.window );
-    }
-
-    bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    XcbSurfaceCreateFlagsKHR flags;
-    xcb_connection_t* connection;
-    xcb_window_t window;
-  };
-  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+    return "{}";
+  }
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
 
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-  struct ImagePipeSurfaceCreateInfoFUCHSIA
-  {
-    ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = ImagePipeSurfaceCreateFlagsFUCHSIA(),
-                                       zx_handle_t imagePipeHandle_ = 0 )
-      : flags( flags_ )
-      , imagePipeHandle( imagePipeHandle_ )
-    {
-    }
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  enum class XlibSurfaceCreateFlagBitsKHR : VkFlags
+  {};
 
-    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) );
-    }
-
-    ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) );
-      return *this;
-    }
-    ImagePipeSurfaceCreateInfoFUCHSIA& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImagePipeSurfaceCreateInfoFUCHSIA& setFlags( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ImagePipeSurfaceCreateInfoFUCHSIA& setImagePipeHandle( zx_handle_t imagePipeHandle_ )
-    {
-      imagePipeHandle = imagePipeHandle_;
-      return *this;
-    }
-
-    operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const
-    {
-      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>(this);
-    }
-
-    operator VkImagePipeSurfaceCreateInfoFUCHSIA &()
-    {
-      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>(this);
-    }
-
-    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( imagePipeHandle == rhs.imagePipeHandle );
-    }
-
-    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
-
-  public:
-    const void* pNext = nullptr;
-    ImagePipeSurfaceCreateFlagsFUCHSIA flags;
-    zx_handle_t imagePipeHandle;
-  };
-  static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-
-  struct DebugMarkerMarkerInfoEXT
+  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
   {
-    DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr,
-                              std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
-      : pMarkerName( pMarkerName_ )
-    {
-      memcpy( &color, color_.data(), 4 * sizeof( float ) );
-    }
+    return "(void)";
+  }
 
-    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
-    }
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
 
-    DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
-      return *this;
-    }
-    DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ )
-    {
-      pMarkerName = pMarkerName_;
-      return *this;
-    }
-
-    DebugMarkerMarkerInfoEXT& setColor( std::array<float,4> color_ )
-    {
-      memcpy( &color, color_.data(), 4 * sizeof( float ) );
-      return *this;
-    }
-
-    operator VkDebugMarkerMarkerInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>(this);
-    }
-
-    operator VkDebugMarkerMarkerInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>(this);
-    }
-
-    bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pMarkerName == rhs.pMarkerName )
-          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
-    }
-
-    bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    const char* pMarkerName;
-    float color[4];
-  };
-  static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
-
-  struct DedicatedAllocationImageCreateInfoNV
+  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR  )
   {
-    DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
-      : dedicatedAllocation( dedicatedAllocation_ )
-    {
-    }
 
-    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
-    }
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
 
-    DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
-      return *this;
-    }
-    DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {};
+}
+#endif
 
-    DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
-    {
-      dedicatedAllocation = dedicatedAllocation_;
-      return *this;
-    }
-
-    operator VkDedicatedAllocationImageCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>(this);
-    }
-
-    operator VkDedicatedAllocationImageCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>(this);
-    }
-
-    bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dedicatedAllocation == rhs.dedicatedAllocation );
-    }
-
-    bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 dedicatedAllocation;
-  };
-  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
-
-  struct DedicatedAllocationBufferCreateInfoNV
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  class ErrorCategoryImpl : public std::error_category
   {
-    DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
-      : dedicatedAllocation( dedicatedAllocation_ )
-    {
-    }
-
-    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
-    }
-
-    DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
-      return *this;
-    }
-    DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
-    {
-      dedicatedAllocation = dedicatedAllocation_;
-      return *this;
-    }
-
-    operator VkDedicatedAllocationBufferCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>(this);
-    }
-
-    operator VkDedicatedAllocationBufferCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>(this);
-    }
-
-    bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dedicatedAllocation == rhs.dedicatedAllocation );
-    }
-
-    bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 dedicatedAllocation;
+    public:
+    virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
+    virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
   };
-  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
 
-  struct DedicatedAllocationMemoryAllocateInfoNV
+  class Error
   {
-    DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(),
-                                             Buffer buffer_ = Buffer() )
-      : image( image_ )
-      , buffer( buffer_ )
-    {
-    }
+    public:
+    Error() VULKAN_HPP_NOEXCEPT = default;
+    Error(const Error&) VULKAN_HPP_NOEXCEPT = default;
+    virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
 
-    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
-    }
-
-    DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
-      return *this;
-    }
-    DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>(this);
-    }
-
-    operator VkDedicatedAllocationMemoryAllocateInfoNV &()
-    {
-      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>(this);
-    }
-
-    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( image == rhs.image )
-          && ( buffer == rhs.buffer );
-    }
-
-    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    Image image;
-    Buffer buffer;
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0;
   };
-  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
 
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  struct ExportMemoryWin32HandleInfoNV
+  class LogicError : public Error, public std::logic_error
   {
-    ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
-                                   DWORD dwAccess_ = 0 )
-      : pAttributes( pAttributes_ )
-      , dwAccess( dwAccess_ )
-    {
-    }
+    public:
+    explicit LogicError( const std::string& what )
+      : Error(), std::logic_error(what) {}
+    explicit LogicError( char const * what )
+      : Error(), std::logic_error(what) {}
 
-    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
-    }
-
-    ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
-      return *this;
-    }
-    ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
-    {
-      pAttributes = pAttributes_;
-      return *this;
-    }
-
-    ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ )
-    {
-      dwAccess = dwAccess_;
-      return *this;
-    }
-
-    operator VkExportMemoryWin32HandleInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>(this);
-    }
-
-    operator VkExportMemoryWin32HandleInfoNV &()
-    {
-      return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>(this);
-    }
-
-    bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pAttributes == rhs.pAttributes )
-          && ( dwAccess == rhs.dwAccess );
-    }
-
-    bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    const SECURITY_ATTRIBUTES* pAttributes;
-    DWORD dwAccess;
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); }
   };
-  static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
 
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  struct Win32KeyedMutexAcquireReleaseInfoNV
+  class SystemError : public Error, public std::system_error
   {
-    Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0,
-                                         const DeviceMemory* pAcquireSyncs_ = nullptr,
-                                         const uint64_t* pAcquireKeys_ = nullptr,
-                                         const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr,
-                                         uint32_t releaseCount_ = 0,
-                                         const DeviceMemory* pReleaseSyncs_ = nullptr,
-                                         const uint64_t* pReleaseKeys_ = nullptr )
-      : acquireCount( acquireCount_ )
-      , pAcquireSyncs( pAcquireSyncs_ )
-      , pAcquireKeys( pAcquireKeys_ )
-      , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
-      , releaseCount( releaseCount_ )
-      , pReleaseSyncs( pReleaseSyncs_ )
-      , pReleaseKeys( pReleaseKeys_ )
-    {
-    }
+    public:
+    SystemError( std::error_code ec )
+      : Error(), std::system_error(ec) {}
+    SystemError( std::error_code ec, std::string const& what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( std::error_code ec, char const * what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( int ev, std::error_category const& ecat )
+      : Error(), std::system_error(ev, ecat) {}
+    SystemError( int ev, std::error_category const& ecat, std::string const& what)
+      : Error(), std::system_error(ev, ecat, what) {}
+    SystemError( int ev, std::error_category const& ecat, char const * what)
+      : Error(), std::system_error(ev, ecat, what) {}
 
-    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
-      return *this;
-    }
-    Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ )
-    {
-      acquireCount = acquireCount_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
-    {
-      pAcquireSyncs = pAcquireSyncs_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
-    {
-      pAcquireKeys = pAcquireKeys_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ )
-    {
-      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ )
-    {
-      releaseCount = releaseCount_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
-    {
-      pReleaseSyncs = pReleaseSyncs_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
-    {
-      pReleaseKeys = pReleaseKeys_;
-      return *this;
-    }
-
-    operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>(this);
-    }
-
-    operator VkWin32KeyedMutexAcquireReleaseInfoNV &()
-    {
-      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>(this);
-    }
-
-    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( acquireCount == rhs.acquireCount )
-          && ( pAcquireSyncs == rhs.pAcquireSyncs )
-          && ( pAcquireKeys == rhs.pAcquireKeys )
-          && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
-          && ( releaseCount == rhs.releaseCount )
-          && ( pReleaseSyncs == rhs.pReleaseSyncs )
-          && ( pReleaseKeys == rhs.pReleaseKeys );
-    }
-
-    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t acquireCount;
-    const DeviceMemory* pAcquireSyncs;
-    const uint64_t* pAcquireKeys;
-    const uint32_t* pAcquireTimeoutMilliseconds;
-    uint32_t releaseCount;
-    const DeviceMemory* pReleaseSyncs;
-    const uint64_t* pReleaseKeys;
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); }
   };
-  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
 
-  struct DeviceGeneratedCommandsFeaturesNVX
+  VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT
   {
-    DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
-      : computeBindingPointSupport( computeBindingPointSupport_ )
-    {
-    }
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
 
-    DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) );
-    }
-
-    DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) );
-      return *this;
-    }
-    DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
-    {
-      computeBindingPointSupport = computeBindingPointSupport_;
-      return *this;
-    }
-
-    operator VkDeviceGeneratedCommandsFeaturesNVX const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
-    }
-
-    operator VkDeviceGeneratedCommandsFeaturesNVX &()
-    {
-      return *reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>(this);
-    }
-
-    bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
-    }
-
-    bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 computeBindingPointSupport;
-  };
-  static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
-
-  struct DeviceGeneratedCommandsLimitsNVX
+  VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT
   {
-    DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0,
-                                      uint32_t maxObjectEntryCounts_ = 0,
-                                      uint32_t minSequenceCountBufferOffsetAlignment_ = 0,
-                                      uint32_t minSequenceIndexBufferOffsetAlignment_ = 0,
-                                      uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
-      : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
-      , maxObjectEntryCounts( maxObjectEntryCounts_ )
-      , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
-      , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
-      , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
-    {
-    }
+    return std::error_code(static_cast<int>(e), errorCategory());
+  }
 
-    DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) );
-    }
-
-    DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) );
-      return *this;
-    }
-    DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
-    {
-      maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
-      return *this;
-    }
-
-    DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
-    {
-      maxObjectEntryCounts = maxObjectEntryCounts_;
-      return *this;
-    }
-
-    DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
-    {
-      minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
-      return *this;
-    }
-
-    DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
-    {
-      minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
-      return *this;
-    }
-
-    DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
-    {
-      minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
-      return *this;
-    }
-
-    operator VkDeviceGeneratedCommandsLimitsNVX const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
-    }
-
-    operator VkDeviceGeneratedCommandsLimitsNVX &()
-    {
-      return *reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>(this);
-    }
-
-    bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
-          && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
-          && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
-          && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
-          && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
-    }
-
-    bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t maxIndirectCommandsLayoutTokenCount;
-    uint32_t maxObjectEntryCounts;
-    uint32_t minSequenceCountBufferOffsetAlignment;
-    uint32_t minSequenceIndexBufferOffsetAlignment;
-    uint32_t minCommandsTokenBufferOffsetAlignment;
-  };
-  static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
-
-  struct CmdReserveSpaceForCommandsInfoNVX
+  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT
   {
-    CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(),
-                                       IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(),
-                                       uint32_t maxSequencesCount_ = 0 )
-      : objectTable( objectTable_ )
-      , indirectCommandsLayout( indirectCommandsLayout_ )
-      , maxSequencesCount( maxSequencesCount_ )
-    {
-    }
+    return std::error_condition(static_cast<int>(e), errorCategory());
+  }
 
-    CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) );
-    }
-
-    CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) );
-      return *this;
-    }
-    CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
-    {
-      objectTable = objectTable_;
-      return *this;
-    }
-
-    CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
-    {
-      indirectCommandsLayout = indirectCommandsLayout_;
-      return *this;
-    }
-
-    CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
-    {
-      maxSequencesCount = maxSequencesCount_;
-      return *this;
-    }
-
-    operator VkCmdReserveSpaceForCommandsInfoNVX const&() const
-    {
-      return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
-    }
-
-    operator VkCmdReserveSpaceForCommandsInfoNVX &()
-    {
-      return *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>(this);
-    }
-
-    bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectTable == rhs.objectTable )
-          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
-          && ( maxSequencesCount == rhs.maxSequencesCount );
-    }
-
-    bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
-
-  public:
-    const void* pNext = nullptr;
-    ObjectTableNVX objectTable;
-    IndirectCommandsLayoutNVX indirectCommandsLayout;
-    uint32_t maxSequencesCount;
-  };
-  static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceFeatures2
+  class OutOfHostMemoryError : public SystemError
   {
-    PhysicalDeviceFeatures2( PhysicalDeviceFeatures features_ = PhysicalDeviceFeatures() )
-      : features( features_ )
-    {
-    }
-
-    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) );
-    }
-
-    PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) );
-      return *this;
-    }
-    PhysicalDeviceFeatures2& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceFeatures2& setFeatures( PhysicalDeviceFeatures features_ )
-    {
-      features = features_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceFeatures2 const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>(this);
-    }
-
-    operator VkPhysicalDeviceFeatures2 &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceFeatures2*>(this);
-    }
-
-    bool operator==( PhysicalDeviceFeatures2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( features == rhs.features );
-    }
-
-    bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceFeatures2;
-
   public:
-    void* pNext = nullptr;
-    PhysicalDeviceFeatures features;
+    OutOfHostMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+    OutOfHostMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
   };
-  static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
 
-  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
-
-  struct PhysicalDevicePushDescriptorPropertiesKHR
+  class OutOfDeviceMemoryError : public SystemError
   {
-    PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = 0 )
-      : maxPushDescriptors( maxPushDescriptors_ )
-    {
-    }
-
-    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
-    }
-
-    PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
-      return *this;
-    }
-    PhysicalDevicePushDescriptorPropertiesKHR& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDevicePushDescriptorPropertiesKHR& setMaxPushDescriptors( uint32_t maxPushDescriptors_ )
-    {
-      maxPushDescriptors = maxPushDescriptors_;
-      return *this;
-    }
-
-    operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>(this);
-    }
-
-    operator VkPhysicalDevicePushDescriptorPropertiesKHR &()
-    {
-      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>(this);
-    }
-
-    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxPushDescriptors == rhs.maxPushDescriptors );
-    }
-
-    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
-
   public:
-    void* pNext = nullptr;
-    uint32_t maxPushDescriptors;
+    OutOfDeviceMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+    OutOfDeviceMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
   };
-  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
 
-  struct PresentRegionsKHR
+  class InitializationFailedError : public SystemError
   {
-    PresentRegionsKHR( uint32_t swapchainCount_ = 0,
-                       const PresentRegionKHR* pRegions_ = nullptr )
-      : swapchainCount( swapchainCount_ )
-      , pRegions( pRegions_ )
-    {
-    }
-
-    PresentRegionsKHR( VkPresentRegionsKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentRegionsKHR ) );
-    }
-
-    PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentRegionsKHR ) );
-      return *this;
-    }
-    PresentRegionsKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PresentRegionsKHR& setSwapchainCount( uint32_t swapchainCount_ )
-    {
-      swapchainCount = swapchainCount_;
-      return *this;
-    }
-
-    PresentRegionsKHR& setPRegions( const PresentRegionKHR* pRegions_ )
-    {
-      pRegions = pRegions_;
-      return *this;
-    }
-
-    operator VkPresentRegionsKHR const&() const
-    {
-      return *reinterpret_cast<const VkPresentRegionsKHR*>(this);
-    }
-
-    operator VkPresentRegionsKHR &()
-    {
-      return *reinterpret_cast<VkPresentRegionsKHR*>(this);
-    }
-
-    bool operator==( PresentRegionsKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( swapchainCount == rhs.swapchainCount )
-          && ( pRegions == rhs.pRegions );
-    }
-
-    bool operator!=( PresentRegionsKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePresentRegionsKHR;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t swapchainCount;
-    const PresentRegionKHR* pRegions;
+    InitializationFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+    InitializationFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
   };
-  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
 
-  struct PhysicalDeviceVariablePointerFeatures
+  class DeviceLostError : public SystemError
   {
-    PhysicalDeviceVariablePointerFeatures( Bool32 variablePointersStorageBuffer_ = 0,
-                                           Bool32 variablePointers_ = 0 )
-      : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
-      , variablePointers( variablePointers_ )
-    {
-    }
-
-    PhysicalDeviceVariablePointerFeatures( VkPhysicalDeviceVariablePointerFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) );
-    }
-
-    PhysicalDeviceVariablePointerFeatures& operator=( VkPhysicalDeviceVariablePointerFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) );
-      return *this;
-    }
-    PhysicalDeviceVariablePointerFeatures& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceVariablePointerFeatures& setVariablePointersStorageBuffer( Bool32 variablePointersStorageBuffer_ )
-    {
-      variablePointersStorageBuffer = variablePointersStorageBuffer_;
-      return *this;
-    }
-
-    PhysicalDeviceVariablePointerFeatures& setVariablePointers( Bool32 variablePointers_ )
-    {
-      variablePointers = variablePointers_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceVariablePointerFeatures const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures*>(this);
-    }
-
-    operator VkPhysicalDeviceVariablePointerFeatures &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceVariablePointerFeatures*>(this);
-    }
-
-    bool operator==( PhysicalDeviceVariablePointerFeatures const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
-          && ( variablePointers == rhs.variablePointers );
-    }
-
-    bool operator!=( PhysicalDeviceVariablePointerFeatures const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceVariablePointerFeatures;
-
   public:
-    void* pNext = nullptr;
-    Bool32 variablePointersStorageBuffer;
-    Bool32 variablePointers;
+    DeviceLostError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+    DeviceLostError( char const * message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
   };
-  static_assert( sizeof( PhysicalDeviceVariablePointerFeatures ) == sizeof( VkPhysicalDeviceVariablePointerFeatures ), "struct and wrapper have different size!" );
 
-  using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointerFeatures;
-
-  struct PhysicalDeviceIDProperties
+  class MemoryMapFailedError : public SystemError
   {
-    operator VkPhysicalDeviceIDProperties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>(this);
-    }
-
-    operator VkPhysicalDeviceIDProperties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceIDProperties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceIDProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
-          && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
-          && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
-          && ( deviceNodeMask == rhs.deviceNodeMask )
-          && ( deviceLUIDValid == rhs.deviceLUIDValid );
-    }
-
-    bool operator!=( PhysicalDeviceIDProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceIdProperties;
-
   public:
-    void* pNext = nullptr;
-    uint8_t deviceUUID[VK_UUID_SIZE];
-    uint8_t driverUUID[VK_UUID_SIZE];
-    uint8_t deviceLUID[VK_LUID_SIZE];
-    uint32_t deviceNodeMask;
-    Bool32 deviceLUIDValid;
+    MemoryMapFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+    MemoryMapFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
   };
-  static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
 
-  using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct ExportMemoryWin32HandleInfoKHR
+  class LayerNotPresentError : public SystemError
   {
-    ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
-                                    DWORD dwAccess_ = 0,
-                                    LPCWSTR name_ = 0 )
-      : pAttributes( pAttributes_ )
-      , dwAccess( dwAccess_ )
-      , name( name_ )
-    {
-    }
-
-    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
-    }
-
-    ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
-      return *this;
-    }
-    ExportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportMemoryWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
-    {
-      pAttributes = pAttributes_;
-      return *this;
-    }
-
-    ExportMemoryWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
-    {
-      dwAccess = dwAccess_;
-      return *this;
-    }
-
-    ExportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ )
-    {
-      name = name_;
-      return *this;
-    }
-
-    operator VkExportMemoryWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkExportMemoryWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pAttributes == rhs.pAttributes )
-          && ( dwAccess == rhs.dwAccess )
-          && ( name == rhs.name );
-    }
-
-    bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    const SECURITY_ATTRIBUTES* pAttributes;
-    DWORD dwAccess;
-    LPCWSTR name;
+    LayerNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+    LayerNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
   };
-  static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct MemoryWin32HandlePropertiesKHR
+  class ExtensionNotPresentError : public SystemError
   {
-    operator VkMemoryWin32HandlePropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>(this);
-    }
-
-    operator VkMemoryWin32HandlePropertiesKHR &()
-    {
-      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>(this);
-    }
-
-    bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
-
   public:
-    void* pNext = nullptr;
-    uint32_t memoryTypeBits;
+    ExtensionNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+    ExtensionNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
   };
-  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  struct MemoryFdPropertiesKHR
+  class FeatureNotPresentError : public SystemError
   {
-    operator VkMemoryFdPropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>(this);
-    }
-
-    operator VkMemoryFdPropertiesKHR &()
-    {
-      return *reinterpret_cast<VkMemoryFdPropertiesKHR*>(this);
-    }
-
-    bool operator==( MemoryFdPropertiesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( MemoryFdPropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryFdPropertiesKHR;
-
   public:
-    void* pNext = nullptr;
-    uint32_t memoryTypeBits;
+    FeatureNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+    FeatureNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
   };
-  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct Win32KeyedMutexAcquireReleaseInfoKHR
+  class IncompatibleDriverError : public SystemError
   {
-    Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0,
-                                          const DeviceMemory* pAcquireSyncs_ = nullptr,
-                                          const uint64_t* pAcquireKeys_ = nullptr,
-                                          const uint32_t* pAcquireTimeouts_ = nullptr,
-                                          uint32_t releaseCount_ = 0,
-                                          const DeviceMemory* pReleaseSyncs_ = nullptr,
-                                          const uint64_t* pReleaseKeys_ = nullptr )
-      : acquireCount( acquireCount_ )
-      , pAcquireSyncs( pAcquireSyncs_ )
-      , pAcquireKeys( pAcquireKeys_ )
-      , pAcquireTimeouts( pAcquireTimeouts_ )
-      , releaseCount( releaseCount_ )
-      , pReleaseSyncs( pReleaseSyncs_ )
-      , pReleaseKeys( pReleaseKeys_ )
-    {
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
-      return *this;
-    }
-    Win32KeyedMutexAcquireReleaseInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& setAcquireCount( uint32_t acquireCount_ )
-    {
-      acquireCount = acquireCount_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
-    {
-      pAcquireSyncs = pAcquireSyncs_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
-    {
-      pAcquireKeys = pAcquireKeys_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ )
-    {
-      pAcquireTimeouts = pAcquireTimeouts_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& setReleaseCount( uint32_t releaseCount_ )
-    {
-      releaseCount = releaseCount_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
-    {
-      pReleaseSyncs = pReleaseSyncs_;
-      return *this;
-    }
-
-    Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
-    {
-      pReleaseKeys = pReleaseKeys_;
-      return *this;
-    }
-
-    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>(this);
-    }
-
-    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &()
-    {
-      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>(this);
-    }
-
-    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( acquireCount == rhs.acquireCount )
-          && ( pAcquireSyncs == rhs.pAcquireSyncs )
-          && ( pAcquireKeys == rhs.pAcquireKeys )
-          && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
-          && ( releaseCount == rhs.releaseCount )
-          && ( pReleaseSyncs == rhs.pReleaseSyncs )
-          && ( pReleaseKeys == rhs.pReleaseKeys );
-    }
-
-    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t acquireCount;
-    const DeviceMemory* pAcquireSyncs;
-    const uint64_t* pAcquireKeys;
-    const uint32_t* pAcquireTimeouts;
-    uint32_t releaseCount;
-    const DeviceMemory* pReleaseSyncs;
-    const uint64_t* pReleaseKeys;
+    IncompatibleDriverError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+    IncompatibleDriverError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
   };
-  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct ExportSemaphoreWin32HandleInfoKHR
+  class TooManyObjectsError : public SystemError
   {
-    ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
-                                       DWORD dwAccess_ = 0,
-                                       LPCWSTR name_ = 0 )
-      : pAttributes( pAttributes_ )
-      , dwAccess( dwAccess_ )
-      , name( name_ )
-    {
-    }
-
-    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
-    }
-
-    ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
-      return *this;
-    }
-    ExportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportSemaphoreWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
-    {
-      pAttributes = pAttributes_;
-      return *this;
-    }
-
-    ExportSemaphoreWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
-    {
-      dwAccess = dwAccess_;
-      return *this;
-    }
-
-    ExportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ )
-    {
-      name = name_;
-      return *this;
-    }
-
-    operator VkExportSemaphoreWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkExportSemaphoreWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pAttributes == rhs.pAttributes )
-          && ( dwAccess == rhs.dwAccess )
-          && ( name == rhs.name );
-    }
-
-    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    const SECURITY_ATTRIBUTES* pAttributes;
-    DWORD dwAccess;
-    LPCWSTR name;
+    TooManyObjectsError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+    TooManyObjectsError( char const * message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
   };
-  static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct D3D12FenceSubmitInfoKHR
+  class FormatNotSupportedError : public SystemError
   {
-    D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0,
-                             const uint64_t* pWaitSemaphoreValues_ = nullptr,
-                             uint32_t signalSemaphoreValuesCount_ = 0,
-                             const uint64_t* pSignalSemaphoreValues_ = nullptr )
-      : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
-      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
-      , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
-      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
-    {
-    }
-
-    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
-    }
-
-    D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
-      return *this;
-    }
-    D3D12FenceSubmitInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR& setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ )
-    {
-      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR& setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ )
-    {
-      pWaitSemaphoreValues = pWaitSemaphoreValues_;
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR& setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ )
-    {
-      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
-      return *this;
-    }
-
-    D3D12FenceSubmitInfoKHR& setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ )
-    {
-      pSignalSemaphoreValues = pSignalSemaphoreValues_;
-      return *this;
-    }
-
-    operator VkD3D12FenceSubmitInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>(this);
-    }
-
-    operator VkD3D12FenceSubmitInfoKHR &()
-    {
-      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>(this);
-    }
-
-    bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
-          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
-          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
-          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
-    }
-
-    bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t waitSemaphoreValuesCount;
-    const uint64_t* pWaitSemaphoreValues;
-    uint32_t signalSemaphoreValuesCount;
-    const uint64_t* pSignalSemaphoreValues;
+    FormatNotSupportedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+    FormatNotSupportedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
   };
-  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct ExportFenceWin32HandleInfoKHR
+  class FragmentedPoolError : public SystemError
   {
-    ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
-                                   DWORD dwAccess_ = 0,
-                                   LPCWSTR name_ = 0 )
-      : pAttributes( pAttributes_ )
-      , dwAccess( dwAccess_ )
-      , name( name_ )
-    {
-    }
-
-    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
-    }
-
-    ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
-      return *this;
-    }
-    ExportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportFenceWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
-    {
-      pAttributes = pAttributes_;
-      return *this;
-    }
-
-    ExportFenceWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
-    {
-      dwAccess = dwAccess_;
-      return *this;
-    }
-
-    ExportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ )
-    {
-      name = name_;
-      return *this;
-    }
-
-    operator VkExportFenceWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkExportFenceWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pAttributes == rhs.pAttributes )
-          && ( dwAccess == rhs.dwAccess )
-          && ( name == rhs.name );
-    }
-
-    bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    const SECURITY_ATTRIBUTES* pAttributes;
-    DWORD dwAccess;
-    LPCWSTR name;
+    FragmentedPoolError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+    FragmentedPoolError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
   };
-  static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  struct PhysicalDeviceMultiviewFeatures
+  class UnknownError : public SystemError
   {
-    PhysicalDeviceMultiviewFeatures( Bool32 multiview_ = 0,
-                                     Bool32 multiviewGeometryShader_ = 0,
-                                     Bool32 multiviewTessellationShader_ = 0 )
-      : multiview( multiview_ )
-      , multiviewGeometryShader( multiviewGeometryShader_ )
-      , multiviewTessellationShader( multiviewTessellationShader_ )
-    {
-    }
-
-    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) );
-    }
-
-    PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) );
-      return *this;
-    }
-    PhysicalDeviceMultiviewFeatures& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures& setMultiview( Bool32 multiview_ )
-    {
-      multiview = multiview_;
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures& setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ )
-    {
-      multiviewGeometryShader = multiviewGeometryShader_;
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures& setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ )
-    {
-      multiviewTessellationShader = multiviewTessellationShader_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMultiviewFeatures const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>(this);
-    }
-
-    operator VkPhysicalDeviceMultiviewFeatures &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>(this);
-    }
-
-    bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( multiview == rhs.multiview )
-          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
-          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
-    }
-
-    bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
-
   public:
-    void* pNext = nullptr;
-    Bool32 multiview;
-    Bool32 multiviewGeometryShader;
-    Bool32 multiviewTessellationShader;
+    UnknownError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+    UnknownError( char const * message )
+      : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
   };
-  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
 
-  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
-
-  struct PhysicalDeviceMultiviewProperties
+  class OutOfPoolMemoryError : public SystemError
   {
-    operator VkPhysicalDeviceMultiviewProperties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>(this);
-    }
-
-    operator VkPhysicalDeviceMultiviewProperties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
-          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
-    }
-
-    bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
-
   public:
-    void* pNext = nullptr;
-    uint32_t maxMultiviewViewCount;
-    uint32_t maxMultiviewInstanceIndex;
+    OutOfPoolMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+    OutOfPoolMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
   };
-  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
 
-  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
-
-  struct RenderPassMultiviewCreateInfo
+  class InvalidExternalHandleError : public SystemError
   {
-    RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0,
-                                   const uint32_t* pViewMasks_ = nullptr,
-                                   uint32_t dependencyCount_ = 0,
-                                   const int32_t* pViewOffsets_ = nullptr,
-                                   uint32_t correlationMaskCount_ = 0,
-                                   const uint32_t* pCorrelationMasks_ = nullptr )
-      : subpassCount( subpassCount_ )
-      , pViewMasks( pViewMasks_ )
-      , dependencyCount( dependencyCount_ )
-      , pViewOffsets( pViewOffsets_ )
-      , correlationMaskCount( correlationMaskCount_ )
-      , pCorrelationMasks( pCorrelationMasks_ )
-    {
-    }
-
-    RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) );
-    }
-
-    RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) );
-      return *this;
-    }
-    RenderPassMultiviewCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RenderPassMultiviewCreateInfo& setSubpassCount( uint32_t subpassCount_ )
-    {
-      subpassCount = subpassCount_;
-      return *this;
-    }
-
-    RenderPassMultiviewCreateInfo& setPViewMasks( const uint32_t* pViewMasks_ )
-    {
-      pViewMasks = pViewMasks_;
-      return *this;
-    }
-
-    RenderPassMultiviewCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
-    {
-      dependencyCount = dependencyCount_;
-      return *this;
-    }
-
-    RenderPassMultiviewCreateInfo& setPViewOffsets( const int32_t* pViewOffsets_ )
-    {
-      pViewOffsets = pViewOffsets_;
-      return *this;
-    }
-
-    RenderPassMultiviewCreateInfo& setCorrelationMaskCount( uint32_t correlationMaskCount_ )
-    {
-      correlationMaskCount = correlationMaskCount_;
-      return *this;
-    }
-
-    RenderPassMultiviewCreateInfo& setPCorrelationMasks( const uint32_t* pCorrelationMasks_ )
-    {
-      pCorrelationMasks = pCorrelationMasks_;
-      return *this;
-    }
-
-    operator VkRenderPassMultiviewCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>(this);
-    }
-
-    operator VkRenderPassMultiviewCreateInfo &()
-    {
-      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>(this);
-    }
-
-    bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pViewMasks == rhs.pViewMasks )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pViewOffsets == rhs.pViewOffsets )
-          && ( correlationMaskCount == rhs.correlationMaskCount )
-          && ( pCorrelationMasks == rhs.pCorrelationMasks );
-    }
-
-    bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t subpassCount;
-    const uint32_t* pViewMasks;
-    uint32_t dependencyCount;
-    const int32_t* pViewOffsets;
-    uint32_t correlationMaskCount;
-    const uint32_t* pCorrelationMasks;
+    InvalidExternalHandleError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+    InvalidExternalHandleError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
   };
-  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
 
-  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
-
-  struct BindBufferMemoryInfo
+  class FragmentationError : public SystemError
   {
-    BindBufferMemoryInfo( Buffer buffer_ = Buffer(),
-                          DeviceMemory memory_ = DeviceMemory(),
-                          DeviceSize memoryOffset_ = 0 )
-      : buffer( buffer_ )
-      , memory( memory_ )
-      , memoryOffset( memoryOffset_ )
-    {
-    }
-
-    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) );
-    }
-
-    BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) );
-      return *this;
-    }
-    BindBufferMemoryInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BindBufferMemoryInfo& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    BindBufferMemoryInfo& setMemory( DeviceMemory memory_ )
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    BindBufferMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ )
-    {
-      memoryOffset = memoryOffset_;
-      return *this;
-    }
-
-    operator VkBindBufferMemoryInfo const&() const
-    {
-      return *reinterpret_cast<const VkBindBufferMemoryInfo*>(this);
-    }
-
-    operator VkBindBufferMemoryInfo &()
-    {
-      return *reinterpret_cast<VkBindBufferMemoryInfo*>(this);
-    }
-
-    bool operator==( BindBufferMemoryInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( buffer == rhs.buffer )
-          && ( memory == rhs.memory )
-          && ( memoryOffset == rhs.memoryOffset );
-    }
-
-    bool operator!=( BindBufferMemoryInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBindBufferMemoryInfo;
-
   public:
-    const void* pNext = nullptr;
-    Buffer buffer;
-    DeviceMemory memory;
-    DeviceSize memoryOffset;
+    FragmentationError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+    FragmentationError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
   };
-  static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
 
-  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
-
-  struct BindBufferMemoryDeviceGroupInfo
+  class InvalidOpaqueCaptureAddressError : public SystemError
   {
-    BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
-                                     const uint32_t* pDeviceIndices_ = nullptr )
-      : deviceIndexCount( deviceIndexCount_ )
-      , pDeviceIndices( pDeviceIndices_ )
-    {
-    }
-
-    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) );
-    }
-
-    BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) );
-      return *this;
-    }
-    BindBufferMemoryDeviceGroupInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BindBufferMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ )
-    {
-      deviceIndexCount = deviceIndexCount_;
-      return *this;
-    }
-
-    BindBufferMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ )
-    {
-      pDeviceIndices = pDeviceIndices_;
-      return *this;
-    }
-
-    operator VkBindBufferMemoryDeviceGroupInfo const&() const
-    {
-      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>(this);
-    }
-
-    operator VkBindBufferMemoryDeviceGroupInfo &()
-    {
-      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>(this);
-    }
-
-    bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceIndexCount == rhs.deviceIndexCount )
-          && ( pDeviceIndices == rhs.pDeviceIndices );
-    }
-
-    bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t deviceIndexCount;
-    const uint32_t* pDeviceIndices;
+    InvalidOpaqueCaptureAddressError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+    InvalidOpaqueCaptureAddressError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
   };
-  static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
 
-  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
-
-  struct BindImageMemoryInfo
+  class SurfaceLostKHRError : public SystemError
   {
-    BindImageMemoryInfo( Image image_ = Image(),
-                         DeviceMemory memory_ = DeviceMemory(),
-                         DeviceSize memoryOffset_ = 0 )
-      : image( image_ )
-      , memory( memory_ )
-      , memoryOffset( memoryOffset_ )
-    {
-    }
-
-    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) );
-    }
-
-    BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) );
-      return *this;
-    }
-    BindImageMemoryInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BindImageMemoryInfo& setImage( Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    BindImageMemoryInfo& setMemory( DeviceMemory memory_ )
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    BindImageMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ )
-    {
-      memoryOffset = memoryOffset_;
-      return *this;
-    }
-
-    operator VkBindImageMemoryInfo const&() const
-    {
-      return *reinterpret_cast<const VkBindImageMemoryInfo*>(this);
-    }
-
-    operator VkBindImageMemoryInfo &()
-    {
-      return *reinterpret_cast<VkBindImageMemoryInfo*>(this);
-    }
-
-    bool operator==( BindImageMemoryInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( image == rhs.image )
-          && ( memory == rhs.memory )
-          && ( memoryOffset == rhs.memoryOffset );
-    }
-
-    bool operator!=( BindImageMemoryInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBindImageMemoryInfo;
-
   public:
-    const void* pNext = nullptr;
-    Image image;
-    DeviceMemory memory;
-    DeviceSize memoryOffset;
+    SurfaceLostKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+    SurfaceLostKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
   };
-  static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
 
-  using BindImageMemoryInfoKHR = BindImageMemoryInfo;
-
-  struct BindImageMemoryDeviceGroupInfo
+  class NativeWindowInUseKHRError : public SystemError
   {
-    BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
-                                    const uint32_t* pDeviceIndices_ = nullptr,
-                                    uint32_t splitInstanceBindRegionCount_ = 0,
-                                    const Rect2D* pSplitInstanceBindRegions_ = nullptr )
-      : deviceIndexCount( deviceIndexCount_ )
-      , pDeviceIndices( pDeviceIndices_ )
-      , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
-      , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
-    {
-    }
-
-    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) );
-    }
-
-    BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) );
-      return *this;
-    }
-    BindImageMemoryDeviceGroupInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BindImageMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ )
-    {
-      deviceIndexCount = deviceIndexCount_;
-      return *this;
-    }
-
-    BindImageMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ )
-    {
-      pDeviceIndices = pDeviceIndices_;
-      return *this;
-    }
-
-    BindImageMemoryDeviceGroupInfo& setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ )
-    {
-      splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
-      return *this;
-    }
-
-    BindImageMemoryDeviceGroupInfo& setPSplitInstanceBindRegions( const Rect2D* pSplitInstanceBindRegions_ )
-    {
-      pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
-      return *this;
-    }
-
-    operator VkBindImageMemoryDeviceGroupInfo const&() const
-    {
-      return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>(this);
-    }
-
-    operator VkBindImageMemoryDeviceGroupInfo &()
-    {
-      return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>(this);
-    }
-
-    bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceIndexCount == rhs.deviceIndexCount )
-          && ( pDeviceIndices == rhs.pDeviceIndices )
-          && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
-          && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
-    }
-
-    bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t deviceIndexCount;
-    const uint32_t* pDeviceIndices;
-    uint32_t splitInstanceBindRegionCount;
-    const Rect2D* pSplitInstanceBindRegions;
+    NativeWindowInUseKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+    NativeWindowInUseKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
   };
-  static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
 
-  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
-
-  struct DeviceGroupRenderPassBeginInfo
+  class OutOfDateKHRError : public SystemError
   {
-    DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0,
-                                    uint32_t deviceRenderAreaCount_ = 0,
-                                    const Rect2D* pDeviceRenderAreas_ = nullptr )
-      : deviceMask( deviceMask_ )
-      , deviceRenderAreaCount( deviceRenderAreaCount_ )
-      , pDeviceRenderAreas( pDeviceRenderAreas_ )
-    {
-    }
-
-    DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) );
-    }
-
-    DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) );
-      return *this;
-    }
-    DeviceGroupRenderPassBeginInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGroupRenderPassBeginInfo& setDeviceMask( uint32_t deviceMask_ )
-    {
-      deviceMask = deviceMask_;
-      return *this;
-    }
-
-    DeviceGroupRenderPassBeginInfo& setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ )
-    {
-      deviceRenderAreaCount = deviceRenderAreaCount_;
-      return *this;
-    }
-
-    DeviceGroupRenderPassBeginInfo& setPDeviceRenderAreas( const Rect2D* pDeviceRenderAreas_ )
-    {
-      pDeviceRenderAreas = pDeviceRenderAreas_;
-      return *this;
-    }
-
-    operator VkDeviceGroupRenderPassBeginInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>(this);
-    }
-
-    operator VkDeviceGroupRenderPassBeginInfo &()
-    {
-      return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>(this);
-    }
-
-    bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceMask == rhs.deviceMask )
-          && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
-          && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
-    }
-
-    bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t deviceMask;
-    uint32_t deviceRenderAreaCount;
-    const Rect2D* pDeviceRenderAreas;
+    OutOfDateKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+    OutOfDateKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
   };
-  static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
 
-  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
-
-  struct DeviceGroupCommandBufferBeginInfo
+  class IncompatibleDisplayKHRError : public SystemError
   {
-    DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 )
-      : deviceMask( deviceMask_ )
-    {
-    }
-
-    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) );
-    }
-
-    DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) );
-      return *this;
-    }
-    DeviceGroupCommandBufferBeginInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGroupCommandBufferBeginInfo& setDeviceMask( uint32_t deviceMask_ )
-    {
-      deviceMask = deviceMask_;
-      return *this;
-    }
-
-    operator VkDeviceGroupCommandBufferBeginInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>(this);
-    }
-
-    operator VkDeviceGroupCommandBufferBeginInfo &()
-    {
-      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>(this);
-    }
-
-    bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceMask == rhs.deviceMask );
-    }
-
-    bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t deviceMask;
+    IncompatibleDisplayKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+    IncompatibleDisplayKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
   };
-  static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
 
-  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
-
-  struct DeviceGroupSubmitInfo
+  class ValidationFailedEXTError : public SystemError
   {
-    DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0,
-                           const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr,
-                           uint32_t commandBufferCount_ = 0,
-                           const uint32_t* pCommandBufferDeviceMasks_ = nullptr,
-                           uint32_t signalSemaphoreCount_ = 0,
-                           const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr )
-      : waitSemaphoreCount( waitSemaphoreCount_ )
-      , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
-      , commandBufferCount( commandBufferCount_ )
-      , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
-      , signalSemaphoreCount( signalSemaphoreCount_ )
-      , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
-    {
-    }
-
-    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) );
-    }
-
-    DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) );
-      return *this;
-    }
-    DeviceGroupSubmitInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
-    {
-      waitSemaphoreCount = waitSemaphoreCount_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo& setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ )
-    {
-      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
-    {
-      commandBufferCount = commandBufferCount_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo& setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ )
-    {
-      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
-    {
-      signalSemaphoreCount = signalSemaphoreCount_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo& setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ )
-    {
-      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
-      return *this;
-    }
-
-    operator VkDeviceGroupSubmitInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>(this);
-    }
-
-    operator VkDeviceGroupSubmitInfo &()
-    {
-      return *reinterpret_cast<VkDeviceGroupSubmitInfo*>(this);
-    }
-
-    bool operator==( DeviceGroupSubmitInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
-          && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
-          && ( commandBufferCount == rhs.commandBufferCount )
-          && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
-          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
-          && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
-    }
-
-    bool operator!=( DeviceGroupSubmitInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceGroupSubmitInfo;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t waitSemaphoreCount;
-    const uint32_t* pWaitSemaphoreDeviceIndices;
-    uint32_t commandBufferCount;
-    const uint32_t* pCommandBufferDeviceMasks;
-    uint32_t signalSemaphoreCount;
-    const uint32_t* pSignalSemaphoreDeviceIndices;
+    ValidationFailedEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+    ValidationFailedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
   };
-  static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
 
-  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
-
-  struct DeviceGroupBindSparseInfo
+  class InvalidShaderNVError : public SystemError
   {
-    DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0,
-                               uint32_t memoryDeviceIndex_ = 0 )
-      : resourceDeviceIndex( resourceDeviceIndex_ )
-      , memoryDeviceIndex( memoryDeviceIndex_ )
-    {
-    }
-
-    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) );
-    }
-
-    DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) );
-      return *this;
-    }
-    DeviceGroupBindSparseInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGroupBindSparseInfo& setResourceDeviceIndex( uint32_t resourceDeviceIndex_ )
-    {
-      resourceDeviceIndex = resourceDeviceIndex_;
-      return *this;
-    }
-
-    DeviceGroupBindSparseInfo& setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ )
-    {
-      memoryDeviceIndex = memoryDeviceIndex_;
-      return *this;
-    }
-
-    operator VkDeviceGroupBindSparseInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>(this);
-    }
-
-    operator VkDeviceGroupBindSparseInfo &()
-    {
-      return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>(this);
-    }
-
-    bool operator==( DeviceGroupBindSparseInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
-          && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
-    }
-
-    bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
-
   public:
-    const void* pNext = nullptr;
-    uint32_t resourceDeviceIndex;
-    uint32_t memoryDeviceIndex;
+    InvalidShaderNVError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+    InvalidShaderNVError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
   };
-  static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
 
-  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
-
-  struct ImageSwapchainCreateInfoKHR
+  class IncompatibleVersionKHRError : public SystemError
   {
-    ImageSwapchainCreateInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR() )
-      : swapchain( swapchain_ )
-    {
-    }
-
-    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) );
-    }
-
-    ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) );
-      return *this;
-    }
-    ImageSwapchainCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageSwapchainCreateInfoKHR& setSwapchain( SwapchainKHR swapchain_ )
-    {
-      swapchain = swapchain_;
-      return *this;
-    }
-
-    operator VkImageSwapchainCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>(this);
-    }
-
-    operator VkImageSwapchainCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>(this);
-    }
-
-    bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( swapchain == rhs.swapchain );
-    }
-
-    bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    SwapchainKHR swapchain;
+    IncompatibleVersionKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {}
+    IncompatibleVersionKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {}
   };
-  static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
 
-  struct BindImageMemorySwapchainInfoKHR
+  class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
   {
-    BindImageMemorySwapchainInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(),
-                                     uint32_t imageIndex_ = 0 )
-      : swapchain( swapchain_ )
-      , imageIndex( imageIndex_ )
-    {
-    }
-
-    BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) );
-    }
-
-    BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) );
-      return *this;
-    }
-    BindImageMemorySwapchainInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BindImageMemorySwapchainInfoKHR& setSwapchain( SwapchainKHR swapchain_ )
-    {
-      swapchain = swapchain_;
-      return *this;
-    }
-
-    BindImageMemorySwapchainInfoKHR& setImageIndex( uint32_t imageIndex_ )
-    {
-      imageIndex = imageIndex_;
-      return *this;
-    }
-
-    operator VkBindImageMemorySwapchainInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(this);
-    }
-
-    operator VkBindImageMemorySwapchainInfoKHR &()
-    {
-      return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>(this);
-    }
-
-    bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( swapchain == rhs.swapchain )
-          && ( imageIndex == rhs.imageIndex );
-    }
-
-    bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    SwapchainKHR swapchain;
-    uint32_t imageIndex;
+    InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
+    InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
   };
-  static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
 
-  struct AcquireNextImageInfoKHR
+  class NotPermittedEXTError : public SystemError
   {
-    AcquireNextImageInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(),
-                             uint64_t timeout_ = 0,
-                             Semaphore semaphore_ = Semaphore(),
-                             Fence fence_ = Fence(),
-                             uint32_t deviceMask_ = 0 )
-      : swapchain( swapchain_ )
-      , timeout( timeout_ )
-      , semaphore( semaphore_ )
-      , fence( fence_ )
-      , deviceMask( deviceMask_ )
-    {
-    }
-
-    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) );
-    }
-
-    AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) );
-      return *this;
-    }
-    AcquireNextImageInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    AcquireNextImageInfoKHR& setSwapchain( SwapchainKHR swapchain_ )
-    {
-      swapchain = swapchain_;
-      return *this;
-    }
-
-    AcquireNextImageInfoKHR& setTimeout( uint64_t timeout_ )
-    {
-      timeout = timeout_;
-      return *this;
-    }
-
-    AcquireNextImageInfoKHR& setSemaphore( Semaphore semaphore_ )
-    {
-      semaphore = semaphore_;
-      return *this;
-    }
-
-    AcquireNextImageInfoKHR& setFence( Fence fence_ )
-    {
-      fence = fence_;
-      return *this;
-    }
-
-    AcquireNextImageInfoKHR& setDeviceMask( uint32_t deviceMask_ )
-    {
-      deviceMask = deviceMask_;
-      return *this;
-    }
-
-    operator VkAcquireNextImageInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>(this);
-    }
-
-    operator VkAcquireNextImageInfoKHR &()
-    {
-      return *reinterpret_cast<VkAcquireNextImageInfoKHR*>(this);
-    }
-
-    bool operator==( AcquireNextImageInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( swapchain == rhs.swapchain )
-          && ( timeout == rhs.timeout )
-          && ( semaphore == rhs.semaphore )
-          && ( fence == rhs.fence )
-          && ( deviceMask == rhs.deviceMask );
-    }
-
-    bool operator!=( AcquireNextImageInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAcquireNextImageInfoKHR;
-
   public:
-    const void* pNext = nullptr;
-    SwapchainKHR swapchain;
-    uint64_t timeout;
-    Semaphore semaphore;
-    Fence fence;
-    uint32_t deviceMask;
+    NotPermittedEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
+    NotPermittedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
   };
-  static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
 
-  struct HdrMetadataEXT
+  class FullScreenExclusiveModeLostEXTError : public SystemError
   {
-    HdrMetadataEXT( XYColorEXT displayPrimaryRed_ = XYColorEXT(),
-                    XYColorEXT displayPrimaryGreen_ = XYColorEXT(),
-                    XYColorEXT displayPrimaryBlue_ = XYColorEXT(),
-                    XYColorEXT whitePoint_ = XYColorEXT(),
-                    float maxLuminance_ = 0,
-                    float minLuminance_ = 0,
-                    float maxContentLightLevel_ = 0,
-                    float maxFrameAverageLightLevel_ = 0 )
-      : displayPrimaryRed( displayPrimaryRed_ )
-      , displayPrimaryGreen( displayPrimaryGreen_ )
-      , displayPrimaryBlue( displayPrimaryBlue_ )
-      , whitePoint( whitePoint_ )
-      , maxLuminance( maxLuminance_ )
-      , minLuminance( minLuminance_ )
-      , maxContentLightLevel( maxContentLightLevel_ )
-      , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
-    {
-    }
-
-    HdrMetadataEXT( VkHdrMetadataEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( HdrMetadataEXT ) );
-    }
-
-    HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( HdrMetadataEXT ) );
-      return *this;
-    }
-    HdrMetadataEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setDisplayPrimaryRed( XYColorEXT displayPrimaryRed_ )
-    {
-      displayPrimaryRed = displayPrimaryRed_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setDisplayPrimaryGreen( XYColorEXT displayPrimaryGreen_ )
-    {
-      displayPrimaryGreen = displayPrimaryGreen_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setDisplayPrimaryBlue( XYColorEXT displayPrimaryBlue_ )
-    {
-      displayPrimaryBlue = displayPrimaryBlue_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setWhitePoint( XYColorEXT whitePoint_ )
-    {
-      whitePoint = whitePoint_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setMaxLuminance( float maxLuminance_ )
-    {
-      maxLuminance = maxLuminance_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setMinLuminance( float minLuminance_ )
-    {
-      minLuminance = minLuminance_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setMaxContentLightLevel( float maxContentLightLevel_ )
-    {
-      maxContentLightLevel = maxContentLightLevel_;
-      return *this;
-    }
-
-    HdrMetadataEXT& setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ )
-    {
-      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
-      return *this;
-    }
-
-    operator VkHdrMetadataEXT const&() const
-    {
-      return *reinterpret_cast<const VkHdrMetadataEXT*>(this);
-    }
-
-    operator VkHdrMetadataEXT &()
-    {
-      return *reinterpret_cast<VkHdrMetadataEXT*>(this);
-    }
-
-    bool operator==( HdrMetadataEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( displayPrimaryRed == rhs.displayPrimaryRed )
-          && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
-          && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
-          && ( whitePoint == rhs.whitePoint )
-          && ( maxLuminance == rhs.maxLuminance )
-          && ( minLuminance == rhs.minLuminance )
-          && ( maxContentLightLevel == rhs.maxContentLightLevel )
-          && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
-    }
-
-    bool operator!=( HdrMetadataEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eHdrMetadataEXT;
-
   public:
-    const void* pNext = nullptr;
-    XYColorEXT displayPrimaryRed;
-    XYColorEXT displayPrimaryGreen;
-    XYColorEXT displayPrimaryBlue;
-    XYColorEXT whitePoint;
-    float maxLuminance;
-    float minLuminance;
-    float maxContentLightLevel;
-    float maxFrameAverageLightLevel;
+    FullScreenExclusiveModeLostEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
+    FullScreenExclusiveModeLostEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
   };
-  static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
 
-  struct PresentTimesInfoGOOGLE
-  {
-    PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0,
-                            const PresentTimeGOOGLE* pTimes_ = nullptr )
-      : swapchainCount( swapchainCount_ )
-      , pTimes( pTimes_ )
-    {
-    }
 
-    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) );
-    }
-
-    PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) );
-      return *this;
-    }
-    PresentTimesInfoGOOGLE& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PresentTimesInfoGOOGLE& setSwapchainCount( uint32_t swapchainCount_ )
-    {
-      swapchainCount = swapchainCount_;
-      return *this;
-    }
-
-    PresentTimesInfoGOOGLE& setPTimes( const PresentTimeGOOGLE* pTimes_ )
-    {
-      pTimes = pTimes_;
-      return *this;
-    }
-
-    operator VkPresentTimesInfoGOOGLE const&() const
-    {
-      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>(this);
-    }
-
-    operator VkPresentTimesInfoGOOGLE &()
-    {
-      return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>(this);
-    }
-
-    bool operator==( PresentTimesInfoGOOGLE const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( swapchainCount == rhs.swapchainCount )
-          && ( pTimes == rhs.pTimes );
-    }
-
-    bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t swapchainCount;
-    const PresentTimeGOOGLE* pTimes;
-  };
-  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-  struct IOSSurfaceCreateInfoMVK
+  [[noreturn]] static void throwResultException( Result result, char const * message )
   {
-    IOSSurfaceCreateInfoMVK( IOSSurfaceCreateFlagsMVK flags_ = IOSSurfaceCreateFlagsMVK(),
-                             const void* pView_ = nullptr )
-      : flags( flags_ )
-      , pView( pView_ )
+    switch ( result )
     {
+      case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message );
+      case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message );
+      case Result::eErrorInitializationFailed: throw InitializationFailedError( message );
+      case Result::eErrorDeviceLost: throw DeviceLostError( message );
+      case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message );
+      case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message );
+      case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message );
+      case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message );
+      case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message );
+      case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
+      case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
+      case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
+      case Result::eErrorUnknown: throw UnknownError( message );
+      case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
+      case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
+      case Result::eErrorFragmentation: throw FragmentationError( message );
+      case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message );
+      case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
+      case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
+      case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
+      case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
+      case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
+      case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
+      case Result::eErrorIncompatibleVersionKHR: throw IncompatibleVersionKHRError( message );
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
+      case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message );
+      case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
+      default: throw SystemError( make_error_code( result ) );
     }
+  }
+#endif
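// Illustrative sketch, not part of vulkan.hpp or of this patch: the block above maps
// each failure VkResult onto a dedicated exception type derived from vk::SystemError,
// so callers can catch either the specific error or the common base. Assumes exceptions
// are enabled (VULKAN_HPP_NO_EXCEPTIONS not defined); the buffer parameters are made up.
#include <vulkan/vulkan.hpp>
#include <iostream>

vk::Buffer createStagingBuffer( vk::Device device )
{
  vk::BufferCreateInfo info( {}, 64 * 1024, vk::BufferUsageFlagBits::eTransferSrc );
  try
  {
    return device.createBuffer( info );  // throws via throwResultException on any error code
  }
  catch ( vk::OutOfDeviceMemoryError const & )
  {
    // the dedicated exception for Result::eErrorOutOfDeviceMemory
    return vk::Buffer{};
  }
  catch ( vk::SystemError const & err )
  {
    // every other error code is caught through the common base class
    std::cerr << "createBuffer failed: " << err.what() << '\n';
    return vk::Buffer{};
  }
}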
 
-    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
-    }
-
-    IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
-      return *this;
-    }
-    IOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    IOSSurfaceCreateInfoMVK& setFlags( IOSSurfaceCreateFlagsMVK flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    IOSSurfaceCreateInfoMVK& setPView( const void* pView_ )
-    {
-      pView = pView_;
-      return *this;
-    }
-
-    operator VkIOSSurfaceCreateInfoMVK const&() const
-    {
-      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>(this);
-    }
-
-    operator VkIOSSurfaceCreateInfoMVK &()
-    {
-      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>(this);
-    }
-
-    bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pView == rhs.pView );
-    }
-
-    bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
-
-  public:
-    const void* pNext = nullptr;
-    IOSSurfaceCreateFlagsMVK flags;
-    const void* pView;
-  };
-  static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
+  template <typename T> void ignore(T const&) VULKAN_HPP_NOEXCEPT {}
 
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-  struct MacOSSurfaceCreateInfoMVK
+  template <typename T>
+  struct ResultValue
   {
-    MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateFlagsMVK flags_ = MacOSSurfaceCreateFlagsMVK(),
-                               const void* pView_ = nullptr )
-      : flags( flags_ )
-      , pView( pView_ )
-    {
-    }
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v)))
+#else
+    ResultValue( Result r, T & v )
+#endif
+      : result( r )
+      , value( v )
+    {}
 
-    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
-    }
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v))))
+#else
+    ResultValue( Result r, T && v )
+#endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
 
-    MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
-      return *this;
-    }
-    MacOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+    Result  result;
+    T       value;
 
-    MacOSSurfaceCreateInfoMVK& setFlags( MacOSSurfaceCreateFlagsMVK flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
+    operator std::tuple<Result&, T&>() VULKAN_HPP_NOEXCEPT { return std::tuple<Result&, T&>(result, value); }
 
-    MacOSSurfaceCreateInfoMVK& setPView( const void* pView_ )
+#if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST)
+    VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+    operator T const& () const & VULKAN_HPP_NOEXCEPT
     {
-      pView = pView_;
-      return *this;
+      return value;
     }
 
-    operator VkMacOSSurfaceCreateInfoMVK const&() const
+    VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+    operator T& () & VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>(this);
-    }
-
-    operator VkMacOSSurfaceCreateInfoMVK &()
-    {
-      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>(this);
+      return value;
     }
 
-    bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const
+    VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+    operator T const&& () const && VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pView == rhs.pView );
+      return std::move( value );
     }
 
-    bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const
+    VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+    operator T&& () && VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return std::move( value );
     }
-
-  private:
-    StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
-
-  public:
-    const void* pNext = nullptr;
-    MacOSSurfaceCreateFlagsMVK flags;
-    const void* pView;
+#endif
   };
-  static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
 
-  struct PipelineViewportWScalingStateCreateInfoNV
+#if !defined(VULKAN_HPP_NO_SMART_HANDLE)
+  template <typename Type, typename Dispatch>
+  struct ResultValue<UniqueHandle<Type,Dispatch>>
   {
-    PipelineViewportWScalingStateCreateInfoNV( Bool32 viewportWScalingEnable_ = 0,
-                                               uint32_t viewportCount_ = 0,
-                                               const ViewportWScalingNV* pViewportWScalings_ = nullptr )
-      : viewportWScalingEnable( viewportWScalingEnable_ )
-      , viewportCount( viewportCount_ )
-      , pViewportWScalings( pViewportWScalings_ )
-    {
-    }
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue(Result r, UniqueHandle<Type, Dispatch> && v) VULKAN_HPP_NOEXCEPT
+#else
+    ResultValue(Result r, UniqueHandle<Type, Dispatch> && v)
+#endif
+      : result(r)
+      , value(std::move(v))
+    {}
 
-    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+    std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
     {
-      memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
+      return std::make_tuple( result, std::move( value ) );
     }
 
-    PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+#  if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST)
+    VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+    operator UniqueHandle<Type, Dispatch>& () & VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
-      return *this;
-    }
-    PipelineViewportWScalingStateCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineViewportWScalingStateCreateInfoNV& setViewportWScalingEnable( Bool32 viewportWScalingEnable_ )
-    {
-      viewportWScalingEnable = viewportWScalingEnable_;
-      return *this;
-    }
-
-    PipelineViewportWScalingStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ )
-    {
-      viewportCount = viewportCount_;
-      return *this;
-    }
-
-    PipelineViewportWScalingStateCreateInfoNV& setPViewportWScalings( const ViewportWScalingNV* pViewportWScalings_ )
-    {
-      pViewportWScalings = pViewportWScalings_;
-      return *this;
-    }
-
-    operator VkPipelineViewportWScalingStateCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>(this);
-    }
-
-    operator VkPipelineViewportWScalingStateCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>(this);
-    }
-
-    bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
-          && ( viewportCount == rhs.viewportCount )
-          && ( pViewportWScalings == rhs.pViewportWScalings );
+      return value;
     }
 
-    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const
+    VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
+    operator UniqueHandle<Type, Dispatch>() VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return std::move(value);
     }
+#  endif
 
-  private:
-    StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 viewportWScalingEnable;
-    uint32_t viewportCount;
-    const ViewportWScalingNV* pViewportWScalings;
+    Result                        result;
+    UniqueHandle<Type, Dispatch>  value;
   };
-  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
 
-  struct PhysicalDeviceDiscardRectanglePropertiesEXT
+  template <typename Type, typename Dispatch>
+  struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
   {
-    PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = 0 )
-      : maxDiscardRectangles( maxDiscardRectangles_ )
-    {
-    }
+#  ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
+#  else
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
+#  endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
 
-    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
-    }
-
-    PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceDiscardRectanglePropertiesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceDiscardRectanglePropertiesEXT& setMaxDiscardRectangles( uint32_t maxDiscardRectangles_ )
-    {
-      maxDiscardRectangles = maxDiscardRectangles_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
-    }
+    Result                                    result;
+    std::vector<UniqueHandle<Type, Dispatch>> value;
 
-    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const
+    operator std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>() VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>( result, value );
     }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t maxDiscardRectangles;
   };
-  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+#endif
 
-  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  template <typename T>
+  struct ResultValueType
   {
-    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>(this);
-    }
-
-    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>(this);
-    }
-
-    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
-    }
-
-    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 perViewPositionAllComponents;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef ResultValue<T>  type;
+#else
+    typedef T               type;
+#endif
   };
-  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
 
-  struct PhysicalDeviceSurfaceInfo2KHR
+  template <>
+  struct ResultValueType<void>
   {
-    PhysicalDeviceSurfaceInfo2KHR( SurfaceKHR surface_ = SurfaceKHR() )
-      : surface( surface_ )
-    {
-    }
-
-    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
-    }
-
-    PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
-      return *this;
-    }
-    PhysicalDeviceSurfaceInfo2KHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceSurfaceInfo2KHR& setSurface( SurfaceKHR surface_ )
-    {
-      surface = surface_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceSurfaceInfo2KHR const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>(this);
-    }
-
-    operator VkPhysicalDeviceSurfaceInfo2KHR &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>(this);
-    }
-
-    bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( surface == rhs.surface );
-    }
-
-    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
-
-  public:
-    const void* pNext = nullptr;
-    SurfaceKHR surface;
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef Result type;
+#else
+    typedef void   type;
+#endif
   };
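// Illustrative sketch, not part of the patch: ResultValueType picks the return type of
// the enhanced-mode wrappers -- the plain value (or void) when exceptions are enabled,
// ResultValue<T> (or Result) when VULKAN_HPP_NO_EXCEPTIONS is defined. Restated as
// static_asserts against the definitions above:
#include <vulkan/vulkan.hpp>
#include <type_traits>

#if defined( VULKAN_HPP_NO_EXCEPTIONS )
static_assert( std::is_same<vk::ResultValueType<vk::Buffer>::type, vk::ResultValue<vk::Buffer>>::value, "wrapped in ResultValue" );
static_assert( std::is_same<vk::ResultValueType<void>::type, vk::Result>::value, "bare Result" );
#else
static_assert( std::is_same<vk::ResultValueType<vk::Buffer>::type, vk::Buffer>::value, "plain value" );
static_assert( std::is_same<vk::ResultValueType<void>::type, void>::value, "void" );
#endif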
-  static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
 
-  struct DisplayPlaneProperties2KHR
+  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
   {
-    operator VkDisplayPlaneProperties2KHR const&() const
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return result;
+#else
+    if ( result != Result::eSuccess )
     {
-      return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>(this);
+      throwResultException( result, message );
     }
+#endif
+  }
 
-    operator VkDisplayPlaneProperties2KHR &()
-    {
-      return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>(this);
-    }
-
-    bool operator==( DisplayPlaneProperties2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( displayPlaneProperties == rhs.displayPlaneProperties );
-    }
-
-    bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
-
-  public:
-    void* pNext = nullptr;
-    DisplayPlanePropertiesKHR displayPlaneProperties;
-  };
-  static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
-
-  struct DisplayModeProperties2KHR
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
   {
-    operator VkDisplayModeProperties2KHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayModeProperties2KHR*>(this);
-    }
-
-    operator VkDisplayModeProperties2KHR &()
-    {
-      return *reinterpret_cast<VkDisplayModeProperties2KHR*>(this);
-    }
-
-    bool operator==( DisplayModeProperties2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( displayModeProperties == rhs.displayModeProperties );
-    }
-
-    bool operator!=( DisplayModeProperties2KHR const& rhs ) const
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return ResultValue<T>( result, std::move( data ) );
+#else
+    if ( result != Result::eSuccess )
     {
-      return !operator==( rhs );
+      throwResultException( result, message );
     }
+    return std::move( data );
+#endif
+  }
 
-  private:
-    StructureType sType = StructureType::eDisplayModeProperties2KHR;
-
-  public:
-    void* pNext = nullptr;
-    DisplayModePropertiesKHR displayModeProperties;
-  };
-  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
-
-  struct DisplayPlaneInfo2KHR
+  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
   {
-    DisplayPlaneInfo2KHR( DisplayModeKHR mode_ = DisplayModeKHR(),
-                          uint32_t planeIndex_ = 0 )
-      : mode( mode_ )
-      , planeIndex( planeIndex_ )
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
     {
+      throwResultException( result, message );
     }
+#endif
+    return result;
+  }
 
-    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayPlaneInfo2KHR ) );
-    }
-
-    DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayPlaneInfo2KHR ) );
-      return *this;
-    }
-    DisplayPlaneInfo2KHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DisplayPlaneInfo2KHR& setMode( DisplayModeKHR mode_ )
-    {
-      mode = mode_;
-      return *this;
-    }
-
-    DisplayPlaneInfo2KHR& setPlaneIndex( uint32_t planeIndex_ )
-    {
-      planeIndex = planeIndex_;
-      return *this;
-    }
-
-    operator VkDisplayPlaneInfo2KHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>(this);
-    }
-
-    operator VkDisplayPlaneInfo2KHR &()
-    {
-      return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>(this);
-    }
-
-    bool operator==( DisplayPlaneInfo2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( mode == rhs.mode )
-          && ( planeIndex == rhs.planeIndex );
-    }
-
-    bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
-
-  public:
-    const void* pNext = nullptr;
-    DisplayModeKHR mode;
-    uint32_t planeIndex;
-  };
-  static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
-
-  struct PhysicalDevice16BitStorageFeatures
+  template <typename T>
+  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
   {
-    PhysicalDevice16BitStorageFeatures( Bool32 storageBuffer16BitAccess_ = 0,
-                                        Bool32 uniformAndStorageBuffer16BitAccess_ = 0,
-                                        Bool32 storagePushConstant16_ = 0,
-                                        Bool32 storageInputOutput16_ = 0 )
-      : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
-      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
-      , storagePushConstant16( storagePushConstant16_ )
-      , storageInputOutput16( storageInputOutput16_ )
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
     {
+      throwResultException( result, message );
     }
+#endif
+    return ResultValue<T>( result, data );
+  }
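// Illustrative sketch, not part of the patch: commands with several success codes go
// through the successCodes overloads above and hand back a ResultValue<T> even when
// exceptions are enabled, so the caller can see which success code was returned.
// Assumes a valid device, swapchain and semaphore from the surrounding application.
#include <vulkan/vulkan.hpp>
#include <cstdint>

uint32_t acquireImage( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
{
  vk::ResultValue<uint32_t> rv = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable );

  if ( rv.result == vk::Result::eSuboptimalKHR )
  {
    // eSuboptimalKHR is a success code, so it is reported here rather than thrown
  }
  return rv.value;  // the implicit casts are deprecated above; access the member directly
}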
 
-    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) );
-    }
-
-    PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) );
-      return *this;
-    }
-    PhysicalDevice16BitStorageFeatures& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDevice16BitStorageFeatures& setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ )
-    {
-      storageBuffer16BitAccess = storageBuffer16BitAccess_;
-      return *this;
-    }
-
-    PhysicalDevice16BitStorageFeatures& setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ )
-    {
-      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
-      return *this;
-    }
-
-    PhysicalDevice16BitStorageFeatures& setStoragePushConstant16( Bool32 storagePushConstant16_ )
-    {
-      storagePushConstant16 = storagePushConstant16_;
-      return *this;
-    }
-
-    PhysicalDevice16BitStorageFeatures& setStorageInputOutput16( Bool32 storageInputOutput16_ )
-    {
-      storageInputOutput16 = storageInputOutput16_;
-      return *this;
-    }
-
-    operator VkPhysicalDevice16BitStorageFeatures const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>(this);
-    }
-
-    operator VkPhysicalDevice16BitStorageFeatures &()
-    {
-      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>(this);
-    }
-
-    bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
-          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
-          && ( storagePushConstant16 == rhs.storagePushConstant16 )
-          && ( storageInputOutput16 == rhs.storageInputOutput16 );
-    }
-
-    bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 storageBuffer16BitAccess;
-    Bool32 uniformAndStorageBuffer16BitAccess;
-    Bool32 storagePushConstant16;
-    Bool32 storageInputOutput16;
-  };
-  static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
-
-  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
-
-  struct BufferMemoryRequirementsInfo2
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T,D>>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits<T,D>::deleter const& deleter )
   {
-    BufferMemoryRequirementsInfo2( Buffer buffer_ = Buffer() )
-      : buffer( buffer_ )
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return ResultValue<UniqueHandle<T,D>>( result, UniqueHandle<T,D>(data, deleter) );
+#else
+    if ( result != Result::eSuccess )
     {
+      throwResultException( result, message );
     }
+    return UniqueHandle<T,D>(data, deleter);
+#endif
+  }
 
-    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) );
-    }
-
-    BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) );
-      return *this;
-    }
-    BufferMemoryRequirementsInfo2& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BufferMemoryRequirementsInfo2& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    operator VkBufferMemoryRequirementsInfo2 const&() const
-    {
-      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>(this);
-    }
-
-    operator VkBufferMemoryRequirementsInfo2 &()
-    {
-      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>(this);
-    }
-
-    bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( buffer == rhs.buffer );
-    }
-
-    bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
-
-  public:
-    const void* pNext = nullptr;
-    Buffer buffer;
-  };
-  static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
-
-  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
-
-  struct ImageMemoryRequirementsInfo2
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE ResultValue<UniqueHandle<T, D>>
+                    createResultValue( Result                                             result,
+                                       T &                                                data,
+                                       char const *                                       message,
+                                       std::initializer_list<Result>                      successCodes,
+                                       typename UniqueHandleTraits<T, D>::deleter const & deleter )
   {
-    ImageMemoryRequirementsInfo2( Image image_ = Image() )
-      : image( image_ )
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#  else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
     {
+      throwResultException( result, message );
     }
+#  endif
+    return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
+  }
 
-    ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) );
-    }
-
-    ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) );
-      return *this;
-    }
-    ImageMemoryRequirementsInfo2& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageMemoryRequirementsInfo2& setImage( Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    operator VkImageMemoryRequirementsInfo2 const&() const
-    {
-      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>(this);
-    }
-
-    operator VkImageMemoryRequirementsInfo2 &()
-    {
-      return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>(this);
-    }
-
-    bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( image == rhs.image );
-    }
-
-    bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
-
-  public:
-    const void* pNext = nullptr;
-    Image image;
-  };
-  static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
-
-  using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
-
-  struct ImageSparseMemoryRequirementsInfo2
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<T, D>>>::type
+    createResultValue( Result result, std::vector<UniqueHandle<T, D>> && data, char const * message )
   {
-    ImageSparseMemoryRequirementsInfo2( Image image_ = Image() )
-      : image( image_ )
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+    return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
+#  else
+    if ( result != Result::eSuccess )
     {
+      throwResultException( result, message );
     }
+    return std::move( data );
+#  endif
+  }
 
-    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) );
-    }
-
-    ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) );
-      return *this;
-    }
-    ImageSparseMemoryRequirementsInfo2& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageSparseMemoryRequirementsInfo2& setImage( Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    operator VkImageSparseMemoryRequirementsInfo2 const&() const
-    {
-      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>(this);
-    }
-
-    operator VkImageSparseMemoryRequirementsInfo2 &()
-    {
-      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>(this);
-    }
-
-    bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( image == rhs.image );
-    }
-
-    bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
-
-  public:
-    const void* pNext = nullptr;
-    Image image;
-  };
-  static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
-
-  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
-
-  struct MemoryRequirements2
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<T, D>>>
+                    createResultValue( Result                             result,
+                                       std::vector<UniqueHandle<T, D>> && data,
+                                       char const *                       message,
+                                       std::initializer_list<Result>      successCodes )
   {
-    operator VkMemoryRequirements2 const&() const
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#  else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
     {
-      return *reinterpret_cast<const VkMemoryRequirements2*>(this);
+      throwResultException( result, message );
     }
+#  endif
+    return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
+  }
+#endif
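// Illustrative sketch, not part of the patch: the smart-handle overloads above wrap the
// raw handle in a UniqueHandle (or a vector of them) before returning, so a successful
// call yields an RAII owner. Assumes exceptions and smart handles are both enabled.
#include <vulkan/vulkan.hpp>

void makeScopedBuffer( vk::Device device )
{
  vk::BufferCreateInfo info( {}, 4096, vk::BufferUsageFlagBits::eUniformBuffer );
  vk::UniqueBuffer buffer = device.createBufferUnique( info );
  // `buffer` owns the VkBuffer; the deleter handed to createResultValue destroys it
  // automatically when `buffer` goes out of scope.
}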
 
-    operator VkMemoryRequirements2 &()
-    {
-      return *reinterpret_cast<VkMemoryRequirements2*>(this);
-    }
-
-    bool operator==( MemoryRequirements2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryRequirements == rhs.memoryRequirements );
-    }
-
-    bool operator!=( MemoryRequirements2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryRequirements2;
-
-  public:
-    void* pNext = nullptr;
-    MemoryRequirements memoryRequirements;
-  };
-  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
-
-  using MemoryRequirements2KHR = MemoryRequirements2;
-
-  struct MemoryDedicatedRequirements
+  struct AabbPositionsKHR
   {
-    operator VkMemoryDedicatedRequirements const&() const
-    {
-      return *reinterpret_cast<const VkMemoryDedicatedRequirements*>(this);
-    }
 
-    operator VkMemoryDedicatedRequirements &()
-    {
-      return *reinterpret_cast<VkMemoryDedicatedRequirements*>(this);
-    }
 
-    bool operator==( MemoryDedicatedRequirements const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
-          && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AabbPositionsKHR(float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {}) VULKAN_HPP_NOEXCEPT
+    : minX( minX_ ), minY( minY_ ), minZ( minZ_ ), maxX( maxX_ ), maxY( maxY_ ), maxZ( maxZ_ )
+    {}
 
-    bool operator!=( MemoryDedicatedRequirements const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
+    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-  private:
-    StructureType sType = StructureType::eMemoryDedicatedRequirements;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 prefersDedicatedAllocation;
-    Bool32 requiresDedicatedAllocation;
-  };
-  static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
-
-  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
-
-  struct MemoryDedicatedAllocateInfo
-  {
-    MemoryDedicatedAllocateInfo( Image image_ = Image(),
-                                 Buffer buffer_ = Buffer() )
-      : image( image_ )
-      , buffer( buffer_ )
-    {
-    }
-
-    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs )
+    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) );
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs )
+    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) );
-      return *this;
-    }
-    MemoryDedicatedAllocateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
       return *this;
     }
 
-    MemoryDedicatedAllocateInfo& setImage( Image image_ )
+    AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      image = image_;
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AabbPositionsKHR ) );
       return *this;
     }
 
-    MemoryDedicatedAllocateInfo& setBuffer( Buffer buffer_ )
+    AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
     {
-      buffer = buffer_;
+      minX = minX_;
       return *this;
     }
 
-    operator VkMemoryDedicatedAllocateInfo const&() const
+    AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>(this);
-    }
-
-    operator VkMemoryDedicatedAllocateInfo &()
-    {
-      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>(this);
-    }
-
-    bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( image == rhs.image )
-          && ( buffer == rhs.buffer );
-    }
-
-    bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    Image image;
-    Buffer buffer;
-  };
-  static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
-
-  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
-
-  struct SamplerYcbcrConversionInfo
-  {
-    SamplerYcbcrConversionInfo( SamplerYcbcrConversion conversion_ = SamplerYcbcrConversion() )
-      : conversion( conversion_ )
-    {
-    }
-
-    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) );
-    }
-
-    SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) );
-      return *this;
-    }
-    SamplerYcbcrConversionInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionInfo& setConversion( SamplerYcbcrConversion conversion_ )
-    {
-      conversion = conversion_;
-      return *this;
-    }
-
-    operator VkSamplerYcbcrConversionInfo const&() const
-    {
-      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>(this);
-    }
-
-    operator VkSamplerYcbcrConversionInfo &()
-    {
-      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>(this);
-    }
-
-    bool operator==( SamplerYcbcrConversionInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( conversion == rhs.conversion );
-    }
-
-    bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
-
-  public:
-    const void* pNext = nullptr;
-    SamplerYcbcrConversion conversion;
-  };
-  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
-
-  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
-
-  struct PhysicalDeviceSamplerYcbcrConversionFeatures
-  {
-    PhysicalDeviceSamplerYcbcrConversionFeatures( Bool32 samplerYcbcrConversion_ = 0 )
-      : samplerYcbcrConversion( samplerYcbcrConversion_ )
-    {
-    }
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
-    }
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
-      return *this;
-    }
-    PhysicalDeviceSamplerYcbcrConversionFeatures& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures& setSamplerYcbcrConversion( Bool32 samplerYcbcrConversion_ )
-    {
-      samplerYcbcrConversion = samplerYcbcrConversion_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>(this);
-    }
-
-    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>(this);
-    }
-
-    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
-    }
-
-    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 samplerYcbcrConversion;
-  };
-  static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
-
-  struct SamplerYcbcrConversionImageFormatProperties
-  {
-    operator VkSamplerYcbcrConversionImageFormatProperties const&() const
-    {
-      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>(this);
-    }
-
-    operator VkSamplerYcbcrConversionImageFormatProperties &()
-    {
-      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>(this);
-    }
-
-    bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
-    }
-
-    bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t combinedImageSamplerDescriptorCount;
-  };
-  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
-
-  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
-
-  struct TextureLODGatherFormatPropertiesAMD
-  {
-    operator VkTextureLODGatherFormatPropertiesAMD const&() const
-    {
-      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>(this);
-    }
-
-    operator VkTextureLODGatherFormatPropertiesAMD &()
-    {
-      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>(this);
-    }
-
-    bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
-    }
-
-    bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 supportsTextureGatherLODBiasAMD;
-  };
-  static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
-
-  struct ProtectedSubmitInfo
-  {
-    ProtectedSubmitInfo( Bool32 protectedSubmit_ = 0 )
-      : protectedSubmit( protectedSubmit_ )
-    {
-    }
-
-    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) );
-    }
-
-    ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) );
-      return *this;
-    }
-    ProtectedSubmitInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ProtectedSubmitInfo& setProtectedSubmit( Bool32 protectedSubmit_ )
-    {
-      protectedSubmit = protectedSubmit_;
-      return *this;
-    }
-
-    operator VkProtectedSubmitInfo const&() const
-    {
-      return *reinterpret_cast<const VkProtectedSubmitInfo*>(this);
-    }
-
-    operator VkProtectedSubmitInfo &()
-    {
-      return *reinterpret_cast<VkProtectedSubmitInfo*>(this);
-    }
-
-    bool operator==( ProtectedSubmitInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( protectedSubmit == rhs.protectedSubmit );
-    }
-
-    bool operator!=( ProtectedSubmitInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eProtectedSubmitInfo;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 protectedSubmit;
-  };
-  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceProtectedMemoryFeatures
-  {
-    PhysicalDeviceProtectedMemoryFeatures( Bool32 protectedMemory_ = 0 )
-      : protectedMemory( protectedMemory_ )
-    {
-    }
-
-    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) );
-    }
-
-    PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) );
-      return *this;
-    }
-    PhysicalDeviceProtectedMemoryFeatures& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceProtectedMemoryFeatures& setProtectedMemory( Bool32 protectedMemory_ )
-    {
-      protectedMemory = protectedMemory_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceProtectedMemoryFeatures const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>(this);
-    }
-
-    operator VkPhysicalDeviceProtectedMemoryFeatures &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>(this);
-    }
-
-    bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( protectedMemory == rhs.protectedMemory );
-    }
-
-    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 protectedMemory;
-  };
-  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceProtectedMemoryProperties
-  {
-    PhysicalDeviceProtectedMemoryProperties( Bool32 protectedNoFault_ = 0 )
-      : protectedNoFault( protectedNoFault_ )
-    {
-    }
-
-    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) );
-    }
-
-    PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) );
-      return *this;
-    }
-    PhysicalDeviceProtectedMemoryProperties& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceProtectedMemoryProperties& setProtectedNoFault( Bool32 protectedNoFault_ )
-    {
-      protectedNoFault = protectedNoFault_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceProtectedMemoryProperties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>(this);
-    }
-
-    operator VkPhysicalDeviceProtectedMemoryProperties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( protectedNoFault == rhs.protectedNoFault );
-    }
-
-    bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 protectedNoFault;
-  };
-  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
-
-  struct PipelineCoverageToColorStateCreateInfoNV
-  {
-    PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateFlagsNV flags_ = PipelineCoverageToColorStateCreateFlagsNV(),
-                                              Bool32 coverageToColorEnable_ = 0,
-                                              uint32_t coverageToColorLocation_ = 0 )
-      : flags( flags_ )
-      , coverageToColorEnable( coverageToColorEnable_ )
-      , coverageToColorLocation( coverageToColorLocation_ )
-    {
-    }
-
-    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
-    }
-
-    PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
-      return *this;
-    }
-    PipelineCoverageToColorStateCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineCoverageToColorStateCreateInfoNV& setFlags( PipelineCoverageToColorStateCreateFlagsNV flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorEnable( Bool32 coverageToColorEnable_ )
-    {
-      coverageToColorEnable = coverageToColorEnable_;
-      return *this;
-    }
-
-    PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorLocation( uint32_t coverageToColorLocation_ )
-    {
-      coverageToColorLocation = coverageToColorLocation_;
-      return *this;
-    }
-
-    operator VkPipelineCoverageToColorStateCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>(this);
-    }
-
-    operator VkPipelineCoverageToColorStateCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>(this);
-    }
-
-    bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( coverageToColorEnable == rhs.coverageToColorEnable )
-          && ( coverageToColorLocation == rhs.coverageToColorLocation );
-    }
-
-    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineCoverageToColorStateCreateFlagsNV flags;
-    Bool32 coverageToColorEnable;
-    uint32_t coverageToColorLocation;
-  };
-  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT
-  {
-    operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
-          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
-    }
-
-    bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 filterMinmaxSingleComponentFormats;
-    Bool32 filterMinmaxImageComponentMapping;
-  };
-  static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct MultisamplePropertiesEXT
-  {
-    operator VkMultisamplePropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkMultisamplePropertiesEXT*>(this);
-    }
-
-    operator VkMultisamplePropertiesEXT &()
-    {
-      return *reinterpret_cast<VkMultisamplePropertiesEXT*>(this);
-    }
-
-    bool operator==( MultisamplePropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
-    }
-
-    bool operator!=( MultisamplePropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMultisamplePropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Extent2D maxSampleLocationGridSize;
-  };
-  static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
-  {
-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( Bool32 advancedBlendCoherentOperations_ = 0 )
-      : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
-    {
-    }
-
-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
-    }
-
-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ )
-    {
-      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
-    }
-
-    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 advancedBlendCoherentOperations;
-  };
-  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
-  {
-    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
-          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
-          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
-          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
-          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
-          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
-    }
-
-    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t advancedBlendMaxColorAttachments;
-    Bool32 advancedBlendIndependentBlend;
-    Bool32 advancedBlendNonPremultipliedSrcColor;
-    Bool32 advancedBlendNonPremultipliedDstColor;
-    Bool32 advancedBlendCorrelatedOverlap;
-    Bool32 advancedBlendAllOperations;
-  };
-  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceInlineUniformBlockFeaturesEXT
-  {
-    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( inlineUniformBlock == rhs.inlineUniformBlock )
-          && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
-    }
-
-    bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 inlineUniformBlock;
-    Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind;
-  };
-  static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceInlineUniformBlockPropertiesEXT
-  {
-    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
-          && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
-          && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
-          && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
-          && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
-    }
-
-    bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t maxInlineUniformBlockSize;
-    uint32_t maxPerStageDescriptorInlineUniformBlocks;
-    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
-    uint32_t maxDescriptorSetInlineUniformBlocks;
-    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
-  };
-  static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct WriteDescriptorSetInlineUniformBlockEXT
-  {
-    WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = 0,
-                                             const void* pData_ = nullptr )
-      : dataSize( dataSize_ )
-      , pData( pData_ )
-    {
-    }
-
-    WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) );
-    }
-
-    WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) );
-      return *this;
-    }
-    WriteDescriptorSetInlineUniformBlockEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    WriteDescriptorSetInlineUniformBlockEXT& setDataSize( uint32_t dataSize_ )
-    {
-      dataSize = dataSize_;
-      return *this;
-    }
-
-    WriteDescriptorSetInlineUniformBlockEXT& setPData( const void* pData_ )
-    {
-      pData = pData_;
-      return *this;
-    }
-
-    operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const
-    {
-      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>(this);
-    }
-
-    operator VkWriteDescriptorSetInlineUniformBlockEXT &()
-    {
-      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>(this);
-    }
-
-    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dataSize == rhs.dataSize )
-          && ( pData == rhs.pData );
-    }
-
-    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t dataSize;
-    const void* pData;
-  };
-  static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
-
-  struct DescriptorPoolInlineUniformBlockCreateInfoEXT
-  {
-    DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = 0 )
-      : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
-    {
-    }
-
-    DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) );
-    }
-
-    DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) );
-      return *this;
-    }
-    DescriptorPoolInlineUniformBlockCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DescriptorPoolInlineUniformBlockCreateInfoEXT& setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ )
-    {
-      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
-      return *this;
-    }
-
-    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>(this);
-    }
-
-    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>(this);
-    }
-
-    bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
-    }
-
-    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t maxInlineUniformBlockBindings;
-  };
-  static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct ImageFormatListCreateInfoKHR
-  {
-    ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0,
-                                  const Format* pViewFormats_ = nullptr )
-      : viewFormatCount( viewFormatCount_ )
-      , pViewFormats( pViewFormats_ )
-    {
-    }
-
-    ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) );
-    }
-
-    ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) );
-      return *this;
-    }
-    ImageFormatListCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageFormatListCreateInfoKHR& setViewFormatCount( uint32_t viewFormatCount_ )
-    {
-      viewFormatCount = viewFormatCount_;
-      return *this;
-    }
-
-    ImageFormatListCreateInfoKHR& setPViewFormats( const Format* pViewFormats_ )
-    {
-      pViewFormats = pViewFormats_;
-      return *this;
-    }
-
-    operator VkImageFormatListCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImageFormatListCreateInfoKHR*>(this);
-    }
-
-    operator VkImageFormatListCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkImageFormatListCreateInfoKHR*>(this);
-    }
-
-    bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( viewFormatCount == rhs.viewFormatCount )
-          && ( pViewFormats == rhs.pViewFormats );
-    }
-
-    bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageFormatListCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t viewFormatCount;
-    const Format* pViewFormats;
-  };
-  static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" );
-
-  struct ValidationCacheCreateInfoEXT
-  {
-    ValidationCacheCreateInfoEXT( ValidationCacheCreateFlagsEXT flags_ = ValidationCacheCreateFlagsEXT(),
-                                  size_t initialDataSize_ = 0,
-                                  const void* pInitialData_ = nullptr )
-      : flags( flags_ )
-      , initialDataSize( initialDataSize_ )
-      , pInitialData( pInitialData_ )
-    {
-    }
-
-    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) );
-    }
-
-    ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) );
-      return *this;
-    }
-    ValidationCacheCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ValidationCacheCreateInfoEXT& setFlags( ValidationCacheCreateFlagsEXT flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ValidationCacheCreateInfoEXT& setInitialDataSize( size_t initialDataSize_ )
-    {
-      initialDataSize = initialDataSize_;
-      return *this;
-    }
-
-    ValidationCacheCreateInfoEXT& setPInitialData( const void* pInitialData_ )
-    {
-      pInitialData = pInitialData_;
-      return *this;
-    }
-
-    operator VkValidationCacheCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>(this);
-    }
-
-    operator VkValidationCacheCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>(this);
-    }
-
-    bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( initialDataSize == rhs.initialDataSize )
-          && ( pInitialData == rhs.pInitialData );
-    }
-
-    bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    ValidationCacheCreateFlagsEXT flags;
-    size_t initialDataSize;
-    const void* pInitialData;
-  };
-  static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct ShaderModuleValidationCacheCreateInfoEXT
-  {
-    ShaderModuleValidationCacheCreateInfoEXT( ValidationCacheEXT validationCache_ = ValidationCacheEXT() )
-      : validationCache( validationCache_ )
-    {
-    }
-
-    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) );
-    }
-
-    ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) );
-      return *this;
-    }
-    ShaderModuleValidationCacheCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ShaderModuleValidationCacheCreateInfoEXT& setValidationCache( ValidationCacheEXT validationCache_ )
-    {
-      validationCache = validationCache_;
-      return *this;
-    }
-
-    operator VkShaderModuleValidationCacheCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>(this);
-    }
-
-    operator VkShaderModuleValidationCacheCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>(this);
-    }
-
-    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( validationCache == rhs.validationCache );
-    }
-
-    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    ValidationCacheEXT validationCache;
-  };
-  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceMaintenance3Properties
-  {
-    operator VkPhysicalDeviceMaintenance3Properties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>(this);
-    }
-
-    operator VkPhysicalDeviceMaintenance3Properties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
-          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
-    }
-
-    bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t maxPerSetDescriptors;
-    DeviceSize maxMemoryAllocationSize;
-  };
-  static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
-
-  struct DescriptorSetLayoutSupport
-  {
-    operator VkDescriptorSetLayoutSupport const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>(this);
-    }
-
-    operator VkDescriptorSetLayoutSupport &()
-    {
-      return *reinterpret_cast<VkDescriptorSetLayoutSupport*>(this);
-    }
-
-    bool operator==( DescriptorSetLayoutSupport const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( supported == rhs.supported );
-    }
-
-    bool operator!=( DescriptorSetLayoutSupport const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorSetLayoutSupport;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 supported;
-  };
-  static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
-
-  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
-
-  struct PhysicalDeviceShaderDrawParameterFeatures
-  {
-    PhysicalDeviceShaderDrawParameterFeatures( Bool32 shaderDrawParameters_ = 0 )
-      : shaderDrawParameters( shaderDrawParameters_ )
-    {
-    }
-
-    PhysicalDeviceShaderDrawParameterFeatures( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) );
-    }
-
-    PhysicalDeviceShaderDrawParameterFeatures& operator=( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) );
-      return *this;
-    }
-    PhysicalDeviceShaderDrawParameterFeatures& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderDrawParameterFeatures& setShaderDrawParameters( Bool32 shaderDrawParameters_ )
-    {
-      shaderDrawParameters = shaderDrawParameters_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderDrawParameterFeatures const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParameterFeatures*>(this);
-    }
-
-    operator VkPhysicalDeviceShaderDrawParameterFeatures &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParameterFeatures*>(this);
-    }
-
-    bool operator==( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderDrawParameters == rhs.shaderDrawParameters );
-    }
-
-    bool operator!=( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceShaderDrawParameterFeatures;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 shaderDrawParameters;
-  };
-  static_assert( sizeof( PhysicalDeviceShaderDrawParameterFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParameterFeatures ), "struct and wrapper have different size!" );
-
-  struct DebugUtilsLabelEXT
-  {
-    DebugUtilsLabelEXT( const char* pLabelName_ = nullptr,
-                        std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
-      : pLabelName( pLabelName_ )
-    {
-      memcpy( &color, color_.data(), 4 * sizeof( float ) );
-    }
-
-    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) );
-    }
-
-    DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) );
-      return *this;
-    }
-    DebugUtilsLabelEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugUtilsLabelEXT& setPLabelName( const char* pLabelName_ )
-    {
-      pLabelName = pLabelName_;
-      return *this;
-    }
-
-    DebugUtilsLabelEXT& setColor( std::array<float,4> color_ )
-    {
-      memcpy( &color, color_.data(), 4 * sizeof( float ) );
-      return *this;
-    }
-
-    operator VkDebugUtilsLabelEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>(this);
-    }
-
-    operator VkDebugUtilsLabelEXT &()
-    {
-      return *reinterpret_cast<VkDebugUtilsLabelEXT*>(this);
-    }
-
-    bool operator==( DebugUtilsLabelEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pLabelName == rhs.pLabelName )
-          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
-    }
-
-    bool operator!=( DebugUtilsLabelEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugUtilsLabelEXT;
-
-  public:
-    const void* pNext = nullptr;
-    const char* pLabelName;
-    float color[4];
-  };
-  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
-
-  struct MemoryHostPointerPropertiesEXT
-  {
-    MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = 0 )
-      : memoryTypeBits( memoryTypeBits_ )
-    {
-    }
-
-    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
-    }
-
-    MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
-      return *this;
-    }
-    MemoryHostPointerPropertiesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryHostPointerPropertiesEXT& setMemoryTypeBits( uint32_t memoryTypeBits_ )
-    {
-      memoryTypeBits = memoryTypeBits_;
-      return *this;
-    }
-
-    operator VkMemoryHostPointerPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>(this);
-    }
-
-    operator VkMemoryHostPointerPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>(this);
-    }
-
-    bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t memoryTypeBits;
-  };
-  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
-  {
-    PhysicalDeviceExternalMemoryHostPropertiesEXT( DeviceSize minImportedHostPointerAlignment_ = 0 )
-      : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
-    {
-    }
-
-    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) );
-    }
-
-    PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceExternalMemoryHostPropertiesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceExternalMemoryHostPropertiesEXT& setMinImportedHostPointerAlignment( DeviceSize minImportedHostPointerAlignment_ )
-    {
-      minImportedHostPointerAlignment = minImportedHostPointerAlignment_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
-    }
-
-    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    DeviceSize minImportedHostPointerAlignment;
-  };
-  static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
-  {
-    PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = 0,
-                                                          float maxExtraPrimitiveOverestimationSize_ = 0,
-                                                          float extraPrimitiveOverestimationSizeGranularity_ = 0,
-                                                          Bool32 primitiveUnderestimation_ = 0,
-                                                          Bool32 conservativePointAndLineRasterization_ = 0,
-                                                          Bool32 degenerateTrianglesRasterized_ = 0,
-                                                          Bool32 degenerateLinesRasterized_ = 0,
-                                                          Bool32 fullyCoveredFragmentShaderInputVariable_ = 0,
-                                                          Bool32 conservativeRasterizationPostDepthCoverage_ = 0 )
-      : primitiveOverestimationSize( primitiveOverestimationSize_ )
-      , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
-      , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
-      , primitiveUnderestimation( primitiveUnderestimation_ )
-      , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
-      , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
-      , degenerateLinesRasterized( degenerateLinesRasterized_ )
-      , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
-      , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
-    {
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) );
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveOverestimationSize( float primitiveOverestimationSize_ )
-    {
-      primitiveOverestimationSize = primitiveOverestimationSize_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setMaxExtraPrimitiveOverestimationSize( float maxExtraPrimitiveOverestimationSize_ )
-    {
-      maxExtraPrimitiveOverestimationSize = maxExtraPrimitiveOverestimationSize_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setExtraPrimitiveOverestimationSizeGranularity( float extraPrimitiveOverestimationSizeGranularity_ )
-    {
-      extraPrimitiveOverestimationSizeGranularity = extraPrimitiveOverestimationSizeGranularity_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveUnderestimation( Bool32 primitiveUnderestimation_ )
-    {
-      primitiveUnderestimation = primitiveUnderestimation_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativePointAndLineRasterization( Bool32 conservativePointAndLineRasterization_ )
-    {
-      conservativePointAndLineRasterization = conservativePointAndLineRasterization_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateTrianglesRasterized( Bool32 degenerateTrianglesRasterized_ )
-    {
-      degenerateTrianglesRasterized = degenerateTrianglesRasterized_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateLinesRasterized( Bool32 degenerateLinesRasterized_ )
-    {
-      degenerateLinesRasterized = degenerateLinesRasterized_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setFullyCoveredFragmentShaderInputVariable( Bool32 fullyCoveredFragmentShaderInputVariable_ )
-    {
-      fullyCoveredFragmentShaderInputVariable = fullyCoveredFragmentShaderInputVariable_;
-      return *this;
-    }
-
-    PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativeRasterizationPostDepthCoverage( Bool32 conservativeRasterizationPostDepthCoverage_ )
-    {
-      conservativeRasterizationPostDepthCoverage = conservativeRasterizationPostDepthCoverage_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
-          && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
-          && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
-          && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
-          && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
-          && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
-          && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
-          && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
-          && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
-    }
-
-    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    float primitiveOverestimationSize;
-    float maxExtraPrimitiveOverestimationSize;
-    float extraPrimitiveOverestimationSizeGranularity;
-    Bool32 primitiveUnderestimation;
-    Bool32 conservativePointAndLineRasterization;
-    Bool32 degenerateTrianglesRasterized;
-    Bool32 degenerateLinesRasterized;
-    Bool32 fullyCoveredFragmentShaderInputVariable;
-    Bool32 conservativeRasterizationPostDepthCoverage;
-  };
-  static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceShaderCorePropertiesAMD
-  {
-    operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>(this);
-    }
-
-    operator VkPhysicalDeviceShaderCorePropertiesAMD &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>(this);
-    }
-
-    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderEngineCount == rhs.shaderEngineCount )
-          && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
-          && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
-          && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
-          && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
-          && ( wavefrontSize == rhs.wavefrontSize )
-          && ( sgprsPerSimd == rhs.sgprsPerSimd )
-          && ( minSgprAllocation == rhs.minSgprAllocation )
-          && ( maxSgprAllocation == rhs.maxSgprAllocation )
-          && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
-          && ( vgprsPerSimd == rhs.vgprsPerSimd )
-          && ( minVgprAllocation == rhs.minVgprAllocation )
-          && ( maxVgprAllocation == rhs.maxVgprAllocation )
-          && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
-    }
-
-    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t shaderEngineCount;
-    uint32_t shaderArraysPerEngineCount;
-    uint32_t computeUnitsPerShaderArray;
-    uint32_t simdPerComputeUnit;
-    uint32_t wavefrontsPerSimd;
-    uint32_t wavefrontSize;
-    uint32_t sgprsPerSimd;
-    uint32_t minSgprAllocation;
-    uint32_t maxSgprAllocation;
-    uint32_t sgprAllocationGranularity;
-    uint32_t vgprsPerSimd;
-    uint32_t minVgprAllocation;
-    uint32_t maxVgprAllocation;
-    uint32_t vgprAllocationGranularity;
-  };
-  static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceDescriptorIndexingFeaturesEXT
-  {
-    PhysicalDeviceDescriptorIndexingFeaturesEXT( Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0,
-                                                 Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0,
-                                                 Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0,
-                                                 Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0,
-                                                 Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0,
-                                                 Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0,
-                                                 Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0,
-                                                 Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0,
-                                                 Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0,
-                                                 Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0,
-                                                 Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0,
-                                                 Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0,
-                                                 Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0,
-                                                 Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0,
-                                                 Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0,
-                                                 Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0,
-                                                 Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0,
-                                                 Bool32 descriptorBindingPartiallyBound_ = 0,
-                                                 Bool32 descriptorBindingVariableDescriptorCount_ = 0,
-                                                 Bool32 runtimeDescriptorArray_ = 0 )
-      : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
-      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
-      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
-      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
-      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
-      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
-      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
-      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
-      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
-      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
-      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
-      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
-      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
-      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
-      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
-      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
-      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
-      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
-      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
-      , runtimeDescriptorArray( runtimeDescriptorArray_ )
-    {
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) );
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ )
-    {
-      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ )
-    {
-      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ )
-    {
-      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ )
-    {
-      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ )
-    {
-      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ )
-    {
-      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ )
-    {
-      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ )
-    {
-      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ )
-    {
-      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ )
-    {
-      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ )
-    {
-      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ )
-    {
-      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageImageUpdateAfterBind( Bool32 descriptorBindingStorageImageUpdateAfterBind_ )
-    {
-      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ )
-    {
-      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ )
-    {
-      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ )
-    {
-      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ )
-    {
-      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ )
-    {
-      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ )
-    {
-      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeaturesEXT& setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ )
-    {
-      runtimeDescriptorArray = runtimeDescriptorArray_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
-          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
-          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
-          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
-          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
-          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
-          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
-          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
-          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
-          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
-          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
-          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
-          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
-          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
-          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
-          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
-          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
-          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
-          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
-          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
-    }
-
-    bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 shaderInputAttachmentArrayDynamicIndexing;
-    Bool32 shaderUniformTexelBufferArrayDynamicIndexing;
-    Bool32 shaderStorageTexelBufferArrayDynamicIndexing;
-    Bool32 shaderUniformBufferArrayNonUniformIndexing;
-    Bool32 shaderSampledImageArrayNonUniformIndexing;
-    Bool32 shaderStorageBufferArrayNonUniformIndexing;
-    Bool32 shaderStorageImageArrayNonUniformIndexing;
-    Bool32 shaderInputAttachmentArrayNonUniformIndexing;
-    Bool32 shaderUniformTexelBufferArrayNonUniformIndexing;
-    Bool32 shaderStorageTexelBufferArrayNonUniformIndexing;
-    Bool32 descriptorBindingUniformBufferUpdateAfterBind;
-    Bool32 descriptorBindingSampledImageUpdateAfterBind;
-    Bool32 descriptorBindingStorageImageUpdateAfterBind;
-    Bool32 descriptorBindingStorageBufferUpdateAfterBind;
-    Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
-    Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
-    Bool32 descriptorBindingUpdateUnusedWhilePending;
-    Bool32 descriptorBindingPartiallyBound;
-    Bool32 descriptorBindingVariableDescriptorCount;
-    Bool32 runtimeDescriptorArray;
-  };
-  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceDescriptorIndexingPropertiesEXT
-  {
-    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
-          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
-          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
-          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
-          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
-          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
-          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
-          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
-          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
-          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
-          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
-          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
-          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
-          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
-          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
-          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
-          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
-          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
-          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
-          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
-          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
-          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
-          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
-    }
-
-    bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t maxUpdateAfterBindDescriptorsInAllPools;
-    Bool32 shaderUniformBufferArrayNonUniformIndexingNative;
-    Bool32 shaderSampledImageArrayNonUniformIndexingNative;
-    Bool32 shaderStorageBufferArrayNonUniformIndexingNative;
-    Bool32 shaderStorageImageArrayNonUniformIndexingNative;
-    Bool32 shaderInputAttachmentArrayNonUniformIndexingNative;
-    Bool32 robustBufferAccessUpdateAfterBind;
-    Bool32 quadDivergentImplicitLod;
-    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
-    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
-    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
-    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
-    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
-    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
-    uint32_t maxPerStageUpdateAfterBindResources;
-    uint32_t maxDescriptorSetUpdateAfterBindSamplers;
-    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
-    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
-    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
-    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
-    uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
-    uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
-    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
-  };
-  static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct DescriptorSetVariableDescriptorCountAllocateInfoEXT
-  {
-    DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0,
-                                                         const uint32_t* pDescriptorCounts_ = nullptr )
-      : descriptorSetCount( descriptorSetCount_ )
-      , pDescriptorCounts( pDescriptorCounts_ )
-    {
-    }
-
-    DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) );
-    }
-
-    DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) );
-      return *this;
-    }
-    DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DescriptorSetVariableDescriptorCountAllocateInfoEXT& setDescriptorSetCount( uint32_t descriptorSetCount_ )
-    {
-      descriptorSetCount = descriptorSetCount_;
-      return *this;
-    }
-
-    DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPDescriptorCounts( const uint32_t* pDescriptorCounts_ )
-    {
-      pDescriptorCounts = pDescriptorCounts_;
-      return *this;
-    }
-
-    operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>(this);
-    }
-
-    operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT &()
-    {
-      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>(this);
-    }
-
-    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( descriptorSetCount == rhs.descriptorSetCount )
-          && ( pDescriptorCounts == rhs.pDescriptorCounts );
-    }
-
-    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t descriptorSetCount;
-    const uint32_t* pDescriptorCounts;
-  };
-  static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct DescriptorSetVariableDescriptorCountLayoutSupportEXT
-  {
-    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>(this);
-    }
-
-    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT &()
-    {
-      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>(this);
-    }
-
-    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
-    }
-
-    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t maxVariableDescriptorCount;
-  };
-  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" );
-
-  struct SubpassEndInfoKHR
-  {
-    SubpassEndInfoKHR(  )
-    {
-    }
-
-    SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassEndInfoKHR ) );
-    }
-
-    SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassEndInfoKHR ) );
-      return *this;
-    }
-    SubpassEndInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    operator VkSubpassEndInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkSubpassEndInfoKHR*>(this);
-    }
-
-    operator VkSubpassEndInfoKHR &()
-    {
-      return *reinterpret_cast<VkSubpassEndInfoKHR*>(this);
-    }
-
-    bool operator==( SubpassEndInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext );
-    }
-
-    bool operator!=( SubpassEndInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSubpassEndInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-  };
-  static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "struct and wrapper have different size!" );
-
-  struct PipelineVertexInputDivisorStateCreateInfoEXT
-  {
-    PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = 0,
-                                                  const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = nullptr )
-      : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
-      , pVertexBindingDivisors( pVertexBindingDivisors_ )
-    {
-    }
-
-    PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) );
-    }
-
-    PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) );
-      return *this;
-    }
-    PipelineVertexInputDivisorStateCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineVertexInputDivisorStateCreateInfoEXT& setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ )
-    {
-      vertexBindingDivisorCount = vertexBindingDivisorCount_;
-      return *this;
-    }
-
-    PipelineVertexInputDivisorStateCreateInfoEXT& setPVertexBindingDivisors( const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ )
-    {
-      pVertexBindingDivisors = pVertexBindingDivisors_;
-      return *this;
-    }
-
-    operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>(this);
-    }
-
-    operator VkPipelineVertexInputDivisorStateCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>(this);
-    }
-
-    bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
-          && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
-    }
-
-    bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t vertexBindingDivisorCount;
-    const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors;
-  };
-  static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
-  {
-    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = 0 )
-      : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
-    {
-    }
-
-    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) );
-    }
-
-    PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setMaxVertexAttribDivisor( uint32_t maxVertexAttribDivisor_ )
-    {
-      maxVertexAttribDivisor = maxVertexAttribDivisor_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
-    }
-
-    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t maxVertexAttribDivisor;
-  };
-  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDevicePCIBusInfoPropertiesEXT
-  {
-    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pciDomain == rhs.pciDomain )
-          && ( pciBus == rhs.pciBus )
-          && ( pciDevice == rhs.pciDevice )
-          && ( pciFunction == rhs.pciFunction );
-    }
-
-    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint16_t pciDomain;
-    uint8_t pciBus;
-    uint8_t pciDevice;
-    uint8_t pciFunction;
-  };
-  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  struct ImportAndroidHardwareBufferInfoANDROID
-  {
-    ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr )
-      : buffer( buffer_ )
-    {
-    }
-
-    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) );
-    }
-
-    ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) );
-      return *this;
-    }
-    ImportAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportAndroidHardwareBufferInfoANDROID& setBuffer( struct AHardwareBuffer* buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    operator VkImportAndroidHardwareBufferInfoANDROID const&() const
-    {
-      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>(this);
-    }
-
-    operator VkImportAndroidHardwareBufferInfoANDROID &()
-    {
-      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>(this);
-    }
-
-    bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( buffer == rhs.buffer );
-    }
-
-    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
-
-  public:
-    const void* pNext = nullptr;
-    struct AHardwareBuffer* buffer;
-  };
-  static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  struct AndroidHardwareBufferUsageANDROID
-  {
-    operator VkAndroidHardwareBufferUsageANDROID const&() const
-    {
-      return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>(this);
-    }
-
-    operator VkAndroidHardwareBufferUsageANDROID &()
-    {
-      return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>(this);
-    }
-
-    bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
-    }
-
-    bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
-
-  public:
-    void* pNext = nullptr;
-    uint64_t androidHardwareBufferUsage;
-  };
-  static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  struct AndroidHardwareBufferPropertiesANDROID
-  {
-    operator VkAndroidHardwareBufferPropertiesANDROID const&() const
-    {
-      return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>(this);
-    }
-
-    operator VkAndroidHardwareBufferPropertiesANDROID &()
-    {
-      return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>(this);
-    }
-
-    bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( allocationSize == rhs.allocationSize )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
-
-  public:
-    void* pNext = nullptr;
-    DeviceSize allocationSize;
-    uint32_t memoryTypeBits;
-  };
-  static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  struct MemoryGetAndroidHardwareBufferInfoANDROID
-  {
-    MemoryGetAndroidHardwareBufferInfoANDROID( DeviceMemory memory_ = DeviceMemory() )
-      : memory( memory_ )
-    {
-    }
-
-    MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) );
-    }
-
-    MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) );
-      return *this;
-    }
-    MemoryGetAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryGetAndroidHardwareBufferInfoANDROID& setMemory( DeviceMemory memory_ )
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const
-    {
-      return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>(this);
-    }
-
-    operator VkMemoryGetAndroidHardwareBufferInfoANDROID &()
-    {
-      return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>(this);
-    }
-
-    bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory );
-    }
-
-    bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceMemory memory;
-  };
-  static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-  struct CommandBufferInheritanceConditionalRenderingInfoEXT
-  {
-    CommandBufferInheritanceConditionalRenderingInfoEXT( Bool32 conditionalRenderingEnable_ = 0 )
-      : conditionalRenderingEnable( conditionalRenderingEnable_ )
-    {
-    }
-
-    CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) );
-    }
-
-    CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) );
-      return *this;
-    }
-    CommandBufferInheritanceConditionalRenderingInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CommandBufferInheritanceConditionalRenderingInfoEXT& setConditionalRenderingEnable( Bool32 conditionalRenderingEnable_ )
-    {
-      conditionalRenderingEnable = conditionalRenderingEnable_;
-      return *this;
-    }
-
-    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>(this);
-    }
-
-    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &()
-    {
-      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>(this);
-    }
-
-    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
-    }
-
-    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 conditionalRenderingEnable;
-  };
-  static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  struct ExternalFormatANDROID
-  {
-    ExternalFormatANDROID( uint64_t externalFormat_ = 0 )
-      : externalFormat( externalFormat_ )
-    {
-    }
-
-    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) );
-    }
-
-    ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) );
-      return *this;
-    }
-    ExternalFormatANDROID& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExternalFormatANDROID& setExternalFormat( uint64_t externalFormat_ )
-    {
-      externalFormat = externalFormat_;
-      return *this;
-    }
-
-    operator VkExternalFormatANDROID const&() const
-    {
-      return *reinterpret_cast<const VkExternalFormatANDROID*>(this);
-    }
-
-    operator VkExternalFormatANDROID &()
-    {
-      return *reinterpret_cast<VkExternalFormatANDROID*>(this);
-    }
-
-    bool operator==( ExternalFormatANDROID const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( externalFormat == rhs.externalFormat );
-    }
-
-    bool operator!=( ExternalFormatANDROID const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalFormatANDROID;
-
-  public:
-    void* pNext = nullptr;
-    uint64_t externalFormat;
-  };
-  static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-  struct PhysicalDevice8BitStorageFeaturesKHR
-  {
-    PhysicalDevice8BitStorageFeaturesKHR( Bool32 storageBuffer8BitAccess_ = 0,
-                                          Bool32 uniformAndStorageBuffer8BitAccess_ = 0,
-                                          Bool32 storagePushConstant8_ = 0 )
-      : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
-      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
-      , storagePushConstant8( storagePushConstant8_ )
-    {
-    }
-
-    PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDevice8BitStorageFeaturesKHR ) );
-    }
-
-    PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDevice8BitStorageFeaturesKHR ) );
-      return *this;
-    }
-    PhysicalDevice8BitStorageFeaturesKHR& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDevice8BitStorageFeaturesKHR& setStorageBuffer8BitAccess( Bool32 storageBuffer8BitAccess_ )
-    {
-      storageBuffer8BitAccess = storageBuffer8BitAccess_;
-      return *this;
-    }
-
-    PhysicalDevice8BitStorageFeaturesKHR& setUniformAndStorageBuffer8BitAccess( Bool32 uniformAndStorageBuffer8BitAccess_ )
-    {
-      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
-      return *this;
-    }
-
-    PhysicalDevice8BitStorageFeaturesKHR& setStoragePushConstant8( Bool32 storagePushConstant8_ )
-    {
-      storagePushConstant8 = storagePushConstant8_;
-      return *this;
-    }
-
-    operator VkPhysicalDevice8BitStorageFeaturesKHR const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeaturesKHR*>(this);
-    }
-
-    operator VkPhysicalDevice8BitStorageFeaturesKHR &()
-    {
-      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>(this);
-    }
-
-    bool operator==( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
-          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
-          && ( storagePushConstant8 == rhs.storagePushConstant8 );
-    }
-
-    bool operator!=( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDevice8BitStorageFeaturesKHR;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 storageBuffer8BitAccess;
-    Bool32 uniformAndStorageBuffer8BitAccess;
-    Bool32 storagePushConstant8;
-  };
-  static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceConditionalRenderingFeaturesEXT
-  {
-    PhysicalDeviceConditionalRenderingFeaturesEXT( Bool32 conditionalRendering_ = 0,
-                                                   Bool32 inheritedConditionalRendering_ = 0 )
-      : conditionalRendering( conditionalRendering_ )
-      , inheritedConditionalRendering( inheritedConditionalRendering_ )
-    {
-    }
-
-    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) );
-    }
-
-    PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceConditionalRenderingFeaturesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceConditionalRenderingFeaturesEXT& setConditionalRendering( Bool32 conditionalRendering_ )
-    {
-      conditionalRendering = conditionalRendering_;
-      return *this;
-    }
-
-    PhysicalDeviceConditionalRenderingFeaturesEXT& setInheritedConditionalRendering( Bool32 inheritedConditionalRendering_ )
-    {
-      inheritedConditionalRendering = inheritedConditionalRendering_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( conditionalRendering == rhs.conditionalRendering )
-          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
-    }
-
-    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 conditionalRendering;
-    Bool32 inheritedConditionalRendering;
-  };
-  static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceVulkanMemoryModelFeaturesKHR
-  {
-    operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>(this);
-    }
-
-    operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>(this);
-    }
-
-    bool operator==( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
-          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope );
-    }
-
-    bool operator!=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 vulkanMemoryModel;
-    Bool32 vulkanMemoryModelDeviceScope;
-  };
-  static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceShaderAtomicInt64FeaturesKHR
-  {
-    PhysicalDeviceShaderAtomicInt64FeaturesKHR( Bool32 shaderBufferInt64Atomics_ = 0,
-                                                Bool32 shaderSharedInt64Atomics_ = 0 )
-      : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
-      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
-    {
-    }
-
-    PhysicalDeviceShaderAtomicInt64FeaturesKHR( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) );
-    }
-
-    PhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) );
-      return *this;
-    }
-    PhysicalDeviceShaderAtomicInt64FeaturesKHR& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderAtomicInt64FeaturesKHR& setShaderBufferInt64Atomics( Bool32 shaderBufferInt64Atomics_ )
-    {
-      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderAtomicInt64FeaturesKHR& setShaderSharedInt64Atomics( Bool32 shaderSharedInt64Atomics_ )
-    {
-      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>(this);
-    }
-
-    operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>(this);
-    }
-
-    bool operator==( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
-          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
-    }
-
-    bool operator!=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 shaderBufferInt64Atomics;
-    Bool32 shaderSharedInt64Atomics;
-  };
-  static_assert( sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
-  {
-    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( Bool32 vertexAttributeInstanceRateDivisor_ = 0,
-                                                     Bool32 vertexAttributeInstanceRateZeroDivisor_ = 0 )
-      : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
-      , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
-    {
-    }
-
-    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) );
-    }
-
-    PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceVertexAttributeDivisorFeaturesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
+      minY = minY_;
       return *this;
     }
 
-    PhysicalDeviceVertexAttributeDivisorFeaturesEXT& setVertexAttributeInstanceRateDivisor( Bool32 vertexAttributeInstanceRateDivisor_ )
+    AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
     {
-      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
+      minZ = minZ_;
       return *this;
     }
 
-    PhysicalDeviceVertexAttributeDivisorFeaturesEXT& setVertexAttributeInstanceRateZeroDivisor( Bool32 vertexAttributeInstanceRateZeroDivisor_ )
+    AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
     {
-      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
+      maxX = maxX_;
       return *this;
     }
 
-    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const
+    AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
-          && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
-    }
-
-    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 vertexAttributeInstanceRateDivisor;
-    Bool32 vertexAttributeInstanceRateZeroDivisor;
-  };
-  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
-
-  struct ImageViewASTCDecodeModeEXT
-  {
-    ImageViewASTCDecodeModeEXT( Format decodeMode_ = Format::eUndefined )
-      : decodeMode( decodeMode_ )
-    {
-    }
-
-    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageViewASTCDecodeModeEXT ) );
-    }
-
-    ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageViewASTCDecodeModeEXT ) );
-      return *this;
-    }
-    ImageViewASTCDecodeModeEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
+      maxY = maxY_;
       return *this;
     }
 
-    ImageViewASTCDecodeModeEXT& setDecodeMode( Format decodeMode_ )
+    AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
     {
-      decodeMode = decodeMode_;
+      maxZ = maxZ_;
       return *this;
     }
 
-    operator VkImageViewASTCDecodeModeEXT const&() const
-    {
-      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>(this);
-    }
 
-    operator VkImageViewASTCDecodeModeEXT &()
+    operator VkAabbPositionsKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>(this);
-    }
-
-    bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( decodeMode == rhs.decodeMode );
-    }
-
-    bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
-
-  public:
-    const void* pNext = nullptr;
-    Format decodeMode;
-  };
-  static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceASTCDecodeFeaturesEXT
-  {
-    PhysicalDeviceASTCDecodeFeaturesEXT( Bool32 decodeModeSharedExponent_ = 0 )
-      : decodeModeSharedExponent( decodeModeSharedExponent_ )
-    {
-    }
-
-    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) );
-    }
-
-    PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceASTCDecodeFeaturesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceASTCDecodeFeaturesEXT& setDecodeModeSharedExponent( Bool32 decodeModeSharedExponent_ )
-    {
-      decodeModeSharedExponent = decodeModeSharedExponent_;
-      return *this;
+      return *reinterpret_cast<const VkAabbPositionsKHR*>( this );
     }
 
-    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const
+    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>(this);
+      return *reinterpret_cast<VkAabbPositionsKHR*>( this );
     }
 
-    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>(this);
-    }
 
-    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AabbPositionsKHR const& ) const = default;
+#else
+    bool operator==( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+      return ( minX == rhs.minX )
+          && ( minY == rhs.minY )
+          && ( minZ == rhs.minZ )
+          && ( maxX == rhs.maxX )
+          && ( maxY == rhs.maxY )
+          && ( maxZ == rhs.maxZ );
     }
 
-    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const
+    bool operator!=( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 decodeModeSharedExponent;
-  };
-  static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceTransformFeedbackFeaturesEXT
-  {
-    PhysicalDeviceTransformFeedbackFeaturesEXT( Bool32 transformFeedback_ = 0,
-                                                Bool32 geometryStreams_ = 0 )
-      : transformFeedback( transformFeedback_ )
-      , geometryStreams( geometryStreams_ )
-    {
-    }
-
-    PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) );
-    }
-
-    PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) );
-      return *this;
-    }
-    PhysicalDeviceTransformFeedbackFeaturesEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceTransformFeedbackFeaturesEXT& setTransformFeedback( Bool32 transformFeedback_ )
-    {
-      transformFeedback = transformFeedback_;
-      return *this;
-    }
-
-    PhysicalDeviceTransformFeedbackFeaturesEXT& setGeometryStreams( Bool32 geometryStreams_ )
-    {
-      geometryStreams = geometryStreams_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( transformFeedback == rhs.transformFeedback )
-          && ( geometryStreams == rhs.geometryStreams );
-    }
+#endif
 
-    bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
 
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
 
   public:
-    void* pNext = nullptr;
-    Bool32 transformFeedback;
-    Bool32 geometryStreams;
-  };
-  static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceTransformFeedbackPropertiesEXT
-  {
-    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
-          && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
-          && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
-          && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
-          && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
-          && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
-          && ( transformFeedbackQueries == rhs.transformFeedbackQueries )
-          && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
-          && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
-          && ( transformFeedbackDraw == rhs.transformFeedbackDraw );
-    }
-
-    bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+    float minX = {};
+    float minY = {};
+    float minZ = {};
+    float maxX = {};
+    float maxY = {};
+    float maxZ = {};
 
-  public:
-    void* pNext = nullptr;
-    uint32_t maxTransformFeedbackStreams;
-    uint32_t maxTransformFeedbackBuffers;
-    DeviceSize maxTransformFeedbackBufferSize;
-    uint32_t maxTransformFeedbackStreamDataSize;
-    uint32_t maxTransformFeedbackBufferDataSize;
-    uint32_t maxTransformFeedbackBufferDataStride;
-    Bool32 transformFeedbackQueries;
-    Bool32 transformFeedbackStreamsLinesTriangles;
-    Bool32 transformFeedbackRasterizationStreamSelect;
-    Bool32 transformFeedbackDraw;
   };
-  static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
+  using AabbPositionsNV = AabbPositionsKHR;
 
-  struct PipelineRasterizationStateStreamCreateInfoEXT
+  class AccelerationStructureKHR
   {
-    PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateFlagsEXT flags_ = PipelineRasterizationStateStreamCreateFlagsEXT(),
-                                                   uint32_t rasterizationStream_ = 0 )
-      : flags( flags_ )
-      , rasterizationStream( rasterizationStream_ )
-    {
-    }
-
-    PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) );
-    }
-
-    PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) );
-      return *this;
-    }
-    PipelineRasterizationStateStreamCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineRasterizationStateStreamCreateInfoEXT& setFlags( PipelineRasterizationStateStreamCreateFlagsEXT flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineRasterizationStateStreamCreateInfoEXT& setRasterizationStream( uint32_t rasterizationStream_ )
-    {
-      rasterizationStream = rasterizationStream_;
-      return *this;
-    }
-
-    operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>(this);
-    }
-
-    operator VkPipelineRasterizationStateStreamCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>(this);
-    }
-
-    bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( rasterizationStream == rhs.rasterizationStream );
-    }
-
-    bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
-
   public:
-    const void* pNext = nullptr;
-    PipelineRasterizationStateStreamCreateFlagsEXT flags;
-    uint32_t rasterizationStream;
-  };
-  static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
+    using CType = VkAccelerationStructureKHR;
 
-  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
-  {
-    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( Bool32 representativeFragmentTest_ = 0 )
-      : representativeFragmentTest( representativeFragmentTest_ )
-    {
-    }
-
-    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) );
-    }
-
-    PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceRepresentativeFragmentTestFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceRepresentativeFragmentTestFeaturesNV& setRepresentativeFragmentTest( Bool32 representativeFragmentTest_ )
-    {
-      representativeFragmentTest = representativeFragmentTest_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( representativeFragmentTest == rhs.representativeFragmentTest );
-    }
-
-    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
 
   public:
-    void* pNext = nullptr;
-    Bool32 representativeFragmentTest;
-  };
-  static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
+    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureKHR(VK_NULL_HANDLE)
+    {}
 
-  struct PipelineRepresentativeFragmentTestStateCreateInfoNV
-  {
-    PipelineRepresentativeFragmentTestStateCreateInfoNV( Bool32 representativeFragmentTestEnable_ = 0 )
-      : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
-    {
-    }
+    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureKHR(VK_NULL_HANDLE)
+    {}
 
-    PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) );
-    }
+    VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureKHR( accelerationStructureKHR )
+    {}
 
-    PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs )
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) );
+      m_accelerationStructureKHR = accelerationStructureKHR;
       return *this;
     }
-    PipelineRepresentativeFragmentTestStateCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+#endif
 
-    PipelineRepresentativeFragmentTestStateCreateInfoNV& setRepresentativeFragmentTestEnable( Bool32 representativeFragmentTestEnable_ )
+    AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
     {
-      representativeFragmentTestEnable = representativeFragmentTestEnable_;
+      m_accelerationStructureKHR = VK_NULL_HANDLE;
       return *this;
     }
 
-    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>(this);
+      return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR;
     }
 
-    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &()
+    bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>(this);
+      return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR;
     }
 
-    bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const
+    bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
+      return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR;
     }
+#endif
 
-    bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_accelerationStructureKHR;
     }
 
-  private:
-    StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 representativeFragmentTestEnable;
-  };
-  static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceExclusiveScissorFeaturesNV
-  {
-    PhysicalDeviceExclusiveScissorFeaturesNV( Bool32 exclusiveScissor_ = 0 )
-      : exclusiveScissor( exclusiveScissor_ )
-    {
-    }
-
-    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) );
-    }
-
-    PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceExclusiveScissorFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceExclusiveScissorFeaturesNV& setExclusiveScissor( Bool32 exclusiveScissor_ )
-    {
-      exclusiveScissor = exclusiveScissor_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( exclusiveScissor == rhs.exclusiveScissor );
+      return m_accelerationStructureKHR != VK_NULL_HANDLE;
     }
 
-    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const
+    bool operator!() const VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
+      return m_accelerationStructureKHR == VK_NULL_HANDLE;
     }
 
   private:
-    StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 exclusiveScissor;
+    VkAccelerationStructureKHR m_accelerationStructureKHR;
   };
-  static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" );
 
-  struct PipelineViewportExclusiveScissorStateCreateInfoNV
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eAccelerationStructureKHR>
   {
-    PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = 0,
-                                                       const Rect2D* pExclusiveScissors_ = nullptr )
-      : exclusiveScissorCount( exclusiveScissorCount_ )
-      , pExclusiveScissors( pExclusiveScissors_ )
-    {
-    }
-
-    PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) );
-    }
-
-    PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) );
-      return *this;
-    }
-    PipelineViewportExclusiveScissorStateCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineViewportExclusiveScissorStateCreateInfoNV& setExclusiveScissorCount( uint32_t exclusiveScissorCount_ )
-    {
-      exclusiveScissorCount = exclusiveScissorCount_;
-      return *this;
-    }
-
-    PipelineViewportExclusiveScissorStateCreateInfoNV& setPExclusiveScissors( const Rect2D* pExclusiveScissors_ )
-    {
-      pExclusiveScissors = pExclusiveScissors_;
-      return *this;
-    }
-
-    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>(this);
-    }
-
-    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>(this);
-    }
-
-    bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( exclusiveScissorCount == rhs.exclusiveScissorCount )
-          && ( pExclusiveScissors == rhs.pExclusiveScissors );
-    }
-
-    bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t exclusiveScissorCount;
-    const Rect2D* pExclusiveScissors;
+    using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
   };
-  static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
 
-  struct PhysicalDeviceCornerSampledImageFeaturesNV
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
   {
-    PhysicalDeviceCornerSampledImageFeaturesNV( Bool32 cornerSampledImage_ = 0 )
-      : cornerSampledImage( cornerSampledImage_ )
-    {
-    }
-
-    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) );
-    }
-
-    PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceCornerSampledImageFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceCornerSampledImageFeaturesNV& setCornerSampledImage( Bool32 cornerSampledImage_ )
-    {
-      cornerSampledImage = cornerSampledImage_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( cornerSampledImage == rhs.cornerSampledImage );
-    }
-
-    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 cornerSampledImage;
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
   };
-  static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
 
-  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
-  {
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV( Bool32 computeDerivativeGroupQuads_ = 0,
-                                                      Bool32 computeDerivativeGroupLinear_ = 0 )
-      : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
-      , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
-    {
-    }
 
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) );
-    }
-
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV& setComputeDerivativeGroupQuads( Bool32 computeDerivativeGroupQuads_ )
-    {
-      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
-      return *this;
-    }
-
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV& setComputeDerivativeGroupLinear( Bool32 computeDerivativeGroupLinear_ )
-    {
-      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
-          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
-    }
-
-    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 computeDerivativeGroupQuads;
-    Bool32 computeDerivativeGroupLinear;
-  };
-  static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
   {
-    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( Bool32 fragmentShaderBarycentric_ = 0 )
-      : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
-    {
-    }
-
-    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) );
-    }
-
-    PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceFragmentShaderBarycentricFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceFragmentShaderBarycentricFeaturesNV& setFragmentShaderBarycentric( Bool32 fragmentShaderBarycentric_ )
-    {
-      fragmentShaderBarycentric = fragmentShaderBarycentric_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
-    }
-
-    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 fragmentShaderBarycentric;
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
   };
-  static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
 
-  struct PhysicalDeviceShaderImageFootprintFeaturesNV
-  {
-    PhysicalDeviceShaderImageFootprintFeaturesNV( Bool32 imageFootprint_ = 0 )
-      : imageFootprint( imageFootprint_ )
-    {
-    }
 
-    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) );
-    }
-
-    PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceShaderImageFootprintFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderImageFootprintFeaturesNV& setImageFootprint( Bool32 imageFootprint_ )
-    {
-      imageFootprint = imageFootprint_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( imageFootprint == rhs.imageFootprint );
-    }
-
-    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 imageFootprint;
-  };
-  static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceShadingRateImageFeaturesNV
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
   {
-    PhysicalDeviceShadingRateImageFeaturesNV( Bool32 shadingRateImage_ = 0,
-                                              Bool32 shadingRateCoarseSampleOrder_ = 0 )
-      : shadingRateImage( shadingRateImage_ )
-      , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
-    {
-    }
-
-    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) );
-    }
-
-    PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceShadingRateImageFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShadingRateImageFeaturesNV& setShadingRateImage( Bool32 shadingRateImage_ )
-    {
-      shadingRateImage = shadingRateImage_;
-      return *this;
-    }
-
-    PhysicalDeviceShadingRateImageFeaturesNV& setShadingRateCoarseSampleOrder( Bool32 shadingRateCoarseSampleOrder_ )
-    {
-      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceShadingRateImageFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shadingRateImage == rhs.shadingRateImage )
-          && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
-    }
-
-    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 shadingRateImage;
-    Bool32 shadingRateCoarseSampleOrder;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
-  static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
+  using AccelerationStructureNV = AccelerationStructureKHR;
 
-  struct PhysicalDeviceShadingRateImagePropertiesNV
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  union DeviceOrHostAddressConstKHR
   {
-    operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const
+    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const& rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>(this);
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
     }
 
-    operator VkPhysicalDeviceShadingRateImagePropertiesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>(this);
-    }
+    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+      : deviceAddress( deviceAddress_ )
+    {}
 
-    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shadingRateTexelSize == rhs.shadingRateTexelSize )
-          && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
-          && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
-    }
+    DeviceOrHostAddressConstKHR( const void* hostAddress_ )
+      : hostAddress( hostAddress_ )
+    {}
 
-    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const
+    DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
     {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
-
-  public:
-    void* pNext = nullptr;
-    Extent2D shadingRateTexelSize;
-    uint32_t shadingRatePaletteSize;
-    uint32_t shadingRateMaxCoarseSamples;
-  };
-  static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceMeshShaderFeaturesNV
-  {
-    PhysicalDeviceMeshShaderFeaturesNV( Bool32 taskShader_ = 0,
-                                        Bool32 meshShader_ = 0 )
-      : taskShader( taskShader_ )
-      , meshShader( meshShader_ )
-    {
-    }
-
-    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) );
-    }
-
-    PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) );
-      return *this;
-    }
-    PhysicalDeviceMeshShaderFeaturesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
+      deviceAddress = deviceAddress_;
       return *this;
     }
 
-    PhysicalDeviceMeshShaderFeaturesNV& setTaskShader( Bool32 taskShader_ )
+    DeviceOrHostAddressConstKHR & setHostAddress( const void* hostAddress_ ) VULKAN_HPP_NOEXCEPT
     {
-      taskShader = taskShader_;
+      hostAddress = hostAddress_;
       return *this;
     }
 
-    PhysicalDeviceMeshShaderFeaturesNV& setMeshShader( Bool32 meshShader_ )
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      meshShader = meshShader_;
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
       return *this;
     }
 
-    operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const
+    operator VkDeviceOrHostAddressConstKHR const&() const
     {
-      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>(this);
+      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR*>(this);
     }
 
-    operator VkPhysicalDeviceMeshShaderFeaturesNV &()
+    operator VkDeviceOrHostAddressConstKHR &()
     {
-      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>(this);
+      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR*>(this);
     }
 
-    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( taskShader == rhs.taskShader )
-          && ( meshShader == rhs.meshShader );
-    }
-
-    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
-
-  public:
-    void* pNext = nullptr;
-    Bool32 taskShader;
-    Bool32 meshShader;
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+    const void* hostAddress;
+#else
+    VkDeviceAddress deviceAddress;
+    const void* hostAddress;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
   };
-  static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
-  struct PhysicalDeviceMeshShaderPropertiesNV
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureGeometryTrianglesDataKHR
   {
-    PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = 0,
-                                          uint32_t maxTaskWorkGroupInvocations_ = 0,
-                                          std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = { { 0, 0, 0 } },
-                                          uint32_t maxTaskTotalMemorySize_ = 0,
-                                          uint32_t maxTaskOutputCount_ = 0,
-                                          uint32_t maxMeshWorkGroupInvocations_ = 0,
-                                          std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = { { 0, 0, 0 } },
-                                          uint32_t maxMeshTotalMemorySize_ = 0,
-                                          uint32_t maxMeshOutputVertices_ = 0,
-                                          uint32_t maxMeshOutputPrimitives_ = 0,
-                                          uint32_t maxMeshMultiviewViewCount_ = 0,
-                                          uint32_t meshOutputPerVertexGranularity_ = 0,
-                                          uint32_t meshOutputPerPrimitiveGranularity_ = 0 )
-      : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
-      , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
-      , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
-      , maxTaskOutputCount( maxTaskOutputCount_ )
-      , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
-      , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
-      , maxMeshOutputVertices( maxMeshOutputVertices_ )
-      , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
-      , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
-      , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
-      , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
-    {
-      memcpy( &maxTaskWorkGroupSize, maxTaskWorkGroupSize_.data(), 3 * sizeof( uint32_t ) );
-      memcpy( &maxMeshWorkGroupSize, maxMeshWorkGroupSize_.data(), 3 * sizeof( uint32_t ) );
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
 
-    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    AccelerationStructureGeometryTrianglesDataKHR(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}) VULKAN_HPP_NOEXCEPT
+    : vertexFormat( vertexFormat_ ), vertexData( vertexData_ ), vertexStride( vertexStride_ ), indexType( indexType_ ), indexData( indexData_ ), transformData( transformData_ )
+    {}
 
-    PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) );
-      return *this;
-    }
-    PhysicalDeviceMeshShaderPropertiesNV& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
+    AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxDrawMeshTasksCount( uint32_t maxDrawMeshTasksCount_ )
+    AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      maxDrawMeshTasksCount = maxDrawMeshTasksCount_;
-      return *this;
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskWorkGroupInvocations( uint32_t maxTaskWorkGroupInvocations_ )
+    AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      maxTaskWorkGroupInvocations = maxTaskWorkGroupInvocations_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskWorkGroupSize( std::array<uint32_t,3> maxTaskWorkGroupSize_ )
+    AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &maxTaskWorkGroupSize, maxTaskWorkGroupSize_.data(), 3 * sizeof( uint32_t ) );
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryTrianglesDataKHR ) );
       return *this;
     }
 
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskTotalMemorySize( uint32_t maxTaskTotalMemorySize_ )
-    {
-      maxTaskTotalMemorySize = maxTaskTotalMemorySize_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskOutputCount( uint32_t maxTaskOutputCount_ )
-    {
-      maxTaskOutputCount = maxTaskOutputCount_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshWorkGroupInvocations( uint32_t maxMeshWorkGroupInvocations_ )
-    {
-      maxMeshWorkGroupInvocations = maxMeshWorkGroupInvocations_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshWorkGroupSize( std::array<uint32_t,3> maxMeshWorkGroupSize_ )
-    {
-      memcpy( &maxMeshWorkGroupSize, maxMeshWorkGroupSize_.data(), 3 * sizeof( uint32_t ) );
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshTotalMemorySize( uint32_t maxMeshTotalMemorySize_ )
-    {
-      maxMeshTotalMemorySize = maxMeshTotalMemorySize_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshOutputVertices( uint32_t maxMeshOutputVertices_ )
-    {
-      maxMeshOutputVertices = maxMeshOutputVertices_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshOutputPrimitives( uint32_t maxMeshOutputPrimitives_ )
-    {
-      maxMeshOutputPrimitives = maxMeshOutputPrimitives_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshMultiviewViewCount( uint32_t maxMeshMultiviewViewCount_ )
-    {
-      maxMeshMultiviewViewCount = maxMeshMultiviewViewCount_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMeshOutputPerVertexGranularity( uint32_t meshOutputPerVertexGranularity_ )
-    {
-      meshOutputPerVertexGranularity = meshOutputPerVertexGranularity_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& setMeshOutputPerPrimitiveGranularity( uint32_t meshOutputPerPrimitiveGranularity_ )
-    {
-      meshOutputPerPrimitiveGranularity = meshOutputPerPrimitiveGranularity_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceMeshShaderPropertiesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
-          && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
-          && ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
-          && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
-          && ( maxTaskOutputCount == rhs.maxTaskOutputCount )
-          && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
-          && ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
-          && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
-          && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
-          && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
-          && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
-          && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
-          && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
-    }
-
-    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t maxDrawMeshTasksCount;
-    uint32_t maxTaskWorkGroupInvocations;
-    uint32_t maxTaskWorkGroupSize[3];
-    uint32_t maxTaskTotalMemorySize;
-    uint32_t maxTaskOutputCount;
-    uint32_t maxMeshWorkGroupInvocations;
-    uint32_t maxMeshWorkGroupSize[3];
-    uint32_t maxMeshTotalMemorySize;
-    uint32_t maxMeshOutputVertices;
-    uint32_t maxMeshOutputPrimitives;
-    uint32_t maxMeshMultiviewViewCount;
-    uint32_t meshOutputPerVertexGranularity;
-    uint32_t meshOutputPerPrimitiveGranularity;
-  };
-  static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
-
-  struct GeometryTrianglesNV
-  {
-    GeometryTrianglesNV( Buffer vertexData_ = Buffer(),
-                         DeviceSize vertexOffset_ = 0,
-                         uint32_t vertexCount_ = 0,
-                         DeviceSize vertexStride_ = 0,
-                         Format vertexFormat_ = Format::eUndefined,
-                         Buffer indexData_ = Buffer(),
-                         DeviceSize indexOffset_ = 0,
-                         uint32_t indexCount_ = 0,
-                         IndexType indexType_ = IndexType::eUint16,
-                         Buffer transformData_ = Buffer(),
-                         DeviceSize transformOffset_ = 0 )
-      : vertexData( vertexData_ )
-      , vertexOffset( vertexOffset_ )
-      , vertexCount( vertexCount_ )
-      , vertexStride( vertexStride_ )
-      , vertexFormat( vertexFormat_ )
-      , indexData( indexData_ )
-      , indexOffset( indexOffset_ )
-      , indexCount( indexCount_ )
-      , indexType( indexType_ )
-      , transformData( transformData_ )
-      , transformOffset( transformOffset_ )
-    {
-    }
-
-    GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( GeometryTrianglesNV ) );
-    }
-
-    GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( GeometryTrianglesNV ) );
-      return *this;
-    }
-    GeometryTrianglesNV& setPNext( const void* pNext_ )
+    AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    GeometryTrianglesNV& setVertexData( Buffer vertexData_ )
-    {
-      vertexData = vertexData_;
-      return *this;
-    }
-
-    GeometryTrianglesNV& setVertexOffset( DeviceSize vertexOffset_ )
-    {
-      vertexOffset = vertexOffset_;
-      return *this;
-    }
-
-    GeometryTrianglesNV& setVertexCount( uint32_t vertexCount_ )
-    {
-      vertexCount = vertexCount_;
-      return *this;
-    }
-
-    GeometryTrianglesNV& setVertexStride( DeviceSize vertexStride_ )
-    {
-      vertexStride = vertexStride_;
-      return *this;
-    }
-
-    GeometryTrianglesNV& setVertexFormat( Format vertexFormat_ )
+    AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
     {
       vertexFormat = vertexFormat_;
       return *this;
     }
 
-    GeometryTrianglesNV& setIndexData( Buffer indexData_ )
+    AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
     {
-      indexData = indexData_;
+      vertexData = vertexData_;
       return *this;
     }
 
-    GeometryTrianglesNV& setIndexOffset( DeviceSize indexOffset_ )
+    AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
     {
-      indexOffset = indexOffset_;
+      vertexStride = vertexStride_;
       return *this;
     }
 
-    GeometryTrianglesNV& setIndexCount( uint32_t indexCount_ )
-    {
-      indexCount = indexCount_;
-      return *this;
-    }
-
-    GeometryTrianglesNV& setIndexType( IndexType indexType_ )
+    AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
     {
       indexType = indexType_;
       return *this;
     }
 
-    GeometryTrianglesNV& setTransformData( Buffer transformData_ )
+    AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexData = indexData_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
     {
       transformData = transformData_;
       return *this;
     }
 
-    GeometryTrianglesNV& setTransformOffset( DeviceSize transformOffset_ )
+
+    operator VkAccelerationStructureGeometryTrianglesDataKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureGeometryTrianglesDataKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
+  {
+    using Type = AccelerationStructureGeometryTrianglesDataKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureGeometryAabbsDataKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    AccelerationStructureGeometryAabbsDataKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}) VULKAN_HPP_NOEXCEPT
+    : data( data_ ), stride( stride_ )
+    {}
+
+    AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryAabbsDataKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureGeometryAabbsDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureGeometryAabbsDataKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureGeometryAabbsDataKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
+  {
+    using Type = AccelerationStructureGeometryAabbsDataKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureGeometryInstancesDataKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    AccelerationStructureGeometryInstancesDataKHR(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}) VULKAN_HPP_NOEXCEPT
+    : arrayOfPointers( arrayOfPointers_ ), data( data_ )
+    {}
+
+    AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryInstancesDataKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureGeometryInstancesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayOfPointers = arrayOfPointers_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureGeometryInstancesDataKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureGeometryInstancesDataKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
+  {
+    using Type = AccelerationStructureGeometryInstancesDataKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  union AccelerationStructureGeometryDataKHR
+  {
+    AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
+    }
+
+    AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
+      : triangles( triangles_ )
+    {}
+
+    AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
+      : aabbs( aabbs_ )
+    {}
+
+    AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
+      : instances( instances_ )
+    {}
+
+    AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangles = triangles_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbs = aabbs_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instances = instances_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
+      return *this;
+    }
+
+    operator VkAccelerationStructureGeometryDataKHR const&() const
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR*>(this);
+    }
+
+    operator VkAccelerationStructureGeometryDataKHR &()
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances;
+#else
+    VkAccelerationStructureGeometryTrianglesDataKHR triangles;
+    VkAccelerationStructureGeometryAabbsDataKHR aabbs;
+    VkAccelerationStructureGeometryInstancesDataKHR instances;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureGeometryKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    AccelerationStructureGeometryKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
+    {}
+
+    AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureGeometryKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureGeometryKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryType = geometryType_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometry = geometry_;
+      return *this;
+    }
+
+    AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureGeometryKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureGeometryKHR*>( this );
+    }
+
+    operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureGeometryKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
+    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
+  {
+    using Type = AccelerationStructureGeometryKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  union DeviceOrHostAddressKHR
+  {
+    DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
+    }
+
+    DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+      : deviceAddress( deviceAddress_ )
+    {}
+
+    DeviceOrHostAddressKHR( void* hostAddress_ )
+      : hostAddress( hostAddress_ )
+    {}
+
+    DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    DeviceOrHostAddressKHR & setHostAddress( void* hostAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostAddress = hostAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
+      return *this;
+    }
+
+    operator VkDeviceOrHostAddressKHR const&() const
+    {
+      return *reinterpret_cast<const VkDeviceOrHostAddressKHR*>(this);
+    }
+
+    operator VkDeviceOrHostAddressKHR &()
+    {
+      return *reinterpret_cast<VkDeviceOrHostAddressKHR*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+    void* hostAddress;
+#else
+    VkDeviceAddress deviceAddress;
+    void* hostAddress;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureBuildGeometryInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    AccelerationStructureBuildGeometryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 update_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), flags( flags_ ), update( update_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryArrayOfPointers( geometryArrayOfPointers_ ), geometryCount( geometryCount_ ), ppGeometries( ppGeometries_ ), scratchData( scratchData_ )
+    {}
+
+    AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureBuildGeometryInfoKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setUpdate( VULKAN_HPP_NAMESPACE::Bool32 update_ ) VULKAN_HPP_NOEXCEPT
+    {
+      update = update_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccelerationStructure = srcAccelerationStructure_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccelerationStructure = dstAccelerationStructure_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setGeometryArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryArrayOfPointers = geometryArrayOfPointers_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = geometryCount_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppGeometries = ppGeometries_;
+      return *this;
+    }
+
+    AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scratchData = scratchData_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureBuildGeometryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 update = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers = {};
+    uint32_t geometryCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureBuildGeometryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
+  {
+    using Type = AccelerationStructureBuildGeometryInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
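A minimal usage sketch (not part of the generated header): every setter above returns *this, so a provisional build-geometry info can be assembled in one chained expression. Assumes the default vk namespace, VK_ENABLE_BETA_EXTENSIONS, and illustrative helper/parameter names.

#include <vulkan/vulkan.hpp>

// Sketch: fill a top-level build-geometry info through the fluent setters.
// sType is fixed by the wrapper, so only the payload fields are assigned.
vk::AccelerationStructureBuildGeometryInfoKHR makeTopLevelBuildInfo(
    vk::AccelerationStructureKHR dst,
    uint32_t geometryCount,
    const vk::AccelerationStructureGeometryKHR* const * ppGeometries,
    vk::DeviceOrHostAddressKHR const & scratch )
{
  return vk::AccelerationStructureBuildGeometryInfoKHR{}
      .setType( vk::AccelerationStructureTypeKHR::eTopLevel )
      .setUpdate( VK_FALSE )                   // full build, not an incremental update
      .setDstAccelerationStructure( dst )
      .setGeometryArrayOfPointers( VK_TRUE )   // ppGeometries is an array of per-geometry pointers
      .setGeometryCount( geometryCount )
      .setPpGeometries( ppGeometries )
      .setScratchData( scratch );
}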
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureBuildOffsetInfoKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : primitiveCount( primitiveCount_ ), primitiveOffset( primitiveOffset_ ), firstVertex( firstVertex_ ), transformOffset( transformOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( AccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureBuildOffsetInfoKHR( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureBuildOffsetInfoKHR & operator=( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureBuildOffsetInfoKHR & operator=( AccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureBuildOffsetInfoKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureBuildOffsetInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveCount = primitiveCount_;
+      return *this;
+    }
+
+    AccelerationStructureBuildOffsetInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveOffset = primitiveOffset_;
+      return *this;
+    }
+
+    AccelerationStructureBuildOffsetInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    AccelerationStructureBuildOffsetInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
     {
       transformOffset = transformOffset_;
       return *this;
     }
 
-    operator VkGeometryTrianglesNV const&() const
+
+    operator VkAccelerationStructureBuildOffsetInfoKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryTrianglesNV*>(this);
+      return *reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR*>( this );
     }
 
-    operator VkGeometryTrianglesNV &()
+    operator VkAccelerationStructureBuildOffsetInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryTrianglesNV*>(this);
+      return *reinterpret_cast<VkAccelerationStructureBuildOffsetInfoKHR*>( this );
     }
 
-    bool operator==( GeometryTrianglesNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureBuildOffsetInfoKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( primitiveCount == rhs.primitiveCount )
+          && ( primitiveOffset == rhs.primitiveOffset )
+          && ( firstVertex == rhs.firstVertex )
+          && ( transformOffset == rhs.transformOffset );
+    }
+
+    bool operator!=( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t primitiveCount = {};
+    uint32_t primitiveOffset = {};
+    uint32_t firstVertex = {};
+    uint32_t transformOffset = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureBuildOffsetInfoKHR ) == sizeof( VkAccelerationStructureBuildOffsetInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureBuildOffsetInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
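A matching sketch for the per-geometry offset record, assuming the struct constructors are enabled (the default); the zero offsets are placeholders.

#include <vulkan/vulkan.hpp>

// Sketch: one offset record per geometry in a build.
vk::AccelerationStructureBuildOffsetInfoKHR makeOffsets( uint32_t primitiveCount )
{
  return vk::AccelerationStructureBuildOffsetInfoKHR( primitiveCount,
                                                      /*primitiveOffset*/ 0,
                                                      /*firstVertex*/ 0,
                                                      /*transformOffset*/ 0 );
}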
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureCreateGeometryTypeInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, uint32_t maxPrimitiveCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, uint32_t maxVertexCount_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ = {}) VULKAN_HPP_NOEXCEPT
+    : geometryType( geometryType_ ), maxPrimitiveCount( maxPrimitiveCount_ ), indexType( indexType_ ), maxVertexCount( maxVertexCount_ ), vertexFormat( vertexFormat_ ), allowsTransforms( allowsTransforms_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureCreateGeometryTypeInfoKHR( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & operator=( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & operator=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryType = geometryType_;
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & setMaxPrimitiveCount( uint32_t maxPrimitiveCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPrimitiveCount = maxPrimitiveCount_;
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & setMaxVertexCount( uint32_t maxVertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxVertexCount = maxVertexCount_;
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexFormat = vertexFormat_;
+      return *this;
+    }
+
+    AccelerationStructureCreateGeometryTypeInfoKHR & setAllowsTransforms( VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ ) VULKAN_HPP_NOEXCEPT
+    {
+      allowsTransforms = allowsTransforms_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureCreateGeometryTypeInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCreateGeometryTypeInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureCreateGeometryTypeInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCreateGeometryTypeInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureCreateGeometryTypeInfoKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( geometryType == rhs.geometryType )
+          && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
+          && ( indexType == rhs.indexType )
+          && ( maxVertexCount == rhs.maxVertexCount )
+          && ( vertexFormat == rhs.vertexFormat )
+          && ( allowsTransforms == rhs.allowsTransforms );
+    }
+
+    bool operator!=( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+    uint32_t maxPrimitiveCount = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    uint32_t maxVertexCount = {};
+    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) == sizeof( VkAccelerationStructureCreateGeometryTypeInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureCreateGeometryTypeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR>
+  {
+    using Type = AccelerationStructureCreateGeometryTypeInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
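Illustrative use of the setters above; the uint32 index type and R32G32B32 vertex format are assumptions chosen for the example, not anything mandated by the patch.

#include <vulkan/vulkan.hpp>

// Sketch: describe a triangle geometry type for acceleration structure creation.
vk::AccelerationStructureCreateGeometryTypeInfoKHR makeTriangleGeometryType(
    uint32_t maxPrimitiveCount, uint32_t maxVertexCount )
{
  return vk::AccelerationStructureCreateGeometryTypeInfoKHR{}
      .setGeometryType( vk::GeometryTypeKHR::eTriangles )
      .setMaxPrimitiveCount( maxPrimitiveCount )
      .setIndexType( vk::IndexType::eUint32 )
      .setMaxVertexCount( maxVertexCount )
      .setVertexFormat( vk::Format::eR32G32B32Sfloat )  // assumed position format
      .setAllowsTransforms( VK_FALSE );
}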
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, uint32_t maxGeometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
+    : compactedSize( compactedSize_ ), type( type_ ), flags( flags_ ), maxGeometryCount( maxGeometryCount_ ), pGeometryInfos( pGeometryInfos_ ), deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR> const & geometryInfos_, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
+    : compactedSize( compactedSize_ ), type( type_ ), flags( flags_ ), maxGeometryCount( static_cast<uint32_t>( geometryInfos_.size() ) ), pGeometryInfos( geometryInfos_.data() ), deviceAddress( deviceAddress_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureCreateInfoKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compactedSize = compactedSize_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setMaxGeometryCount( uint32_t maxGeometryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxGeometryCount = maxGeometryCount_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setPGeometryInfos( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGeometryInfos = pGeometryInfos_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    AccelerationStructureCreateInfoKHR & setGeometryInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR> const & geometryInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxGeometryCount = static_cast<uint32_t>( geometryInfos_.size() );
+      pGeometryInfos = geometryInfos_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compactedSize == rhs.compactedSize )
+          && ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( pGeometryInfos == rhs.pGeometryInfos )
+          && ( deviceAddress == rhs.deviceAddress );
+    }
+
+    bool operator!=( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
+    uint32_t maxGeometryCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
+  {
+    using Type = AccelerationStructureCreateInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
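The ArrayProxy overload added here keeps maxGeometryCount and pGeometryInfos consistent. A sketch assuming enhanced mode (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined) and VK_ENABLE_BETA_EXTENSIONS; the struct only stores a pointer, so the container must outlive any use of the create info.

#include <vector>
#include <vulkan/vulkan.hpp>

// Sketch: build a create info whose geometry list comes from a caller-owned vector.
vk::AccelerationStructureCreateInfoKHR makeCreateInfo(
    std::vector<vk::AccelerationStructureCreateGeometryTypeInfoKHR> const & geometryInfos )
{
  vk::AccelerationStructureCreateInfoKHR createInfo;
  createInfo.setType( vk::AccelerationStructureTypeKHR::eTopLevel )
            .setGeometryInfos( geometryInfos );  // sets maxGeometryCount and pGeometryInfos together
  return createInfo;
}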
+
+  class Buffer
+  {
+  public:
+    using CType = VkBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT
+      : m_buffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_buffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
+      : m_buffer( buffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = buffer;
+      return *this;
+    }
+#endif
+
+    Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Buffer const& ) const = default;
+#else
+    bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == rhs.m_buffer;
+    }
+
+    bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != rhs.m_buffer;
+    }
+
+    bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer < rhs.m_buffer;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBuffer m_buffer;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBuffer>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Buffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
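The Buffer wrapper keeps VkBuffer's size and null semantics; a small sketch of the bool test and the explicit conversion defined above.

#include <vulkan/vulkan.hpp>

// Sketch: null check via the explicit bool operator.
bool isNonNull( vk::Buffer buffer )
{
  return static_cast<bool>( buffer );       // true unless the handle is VK_NULL_HANDLE
}

// Sketch: recover the raw handle when calling C entry points.
VkBuffer toRawHandle( vk::Buffer buffer )
{
  return static_cast<VkBuffer>( buffer );   // works whether or not the conversion is explicit
}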
+
+  struct GeometryTrianglesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : vertexData( vertexData_ ), vertexOffset( vertexOffset_ ), vertexCount( vertexCount_ ), vertexStride( vertexStride_ ), vertexFormat( vertexFormat_ ), indexData( indexData_ ), indexOffset( indexOffset_ ), indexCount( indexCount_ ), indexType( indexType_ ), transformData( transformData_ ), transformOffset( transformOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
+      return *this;
+    }
+
+    GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryTrianglesNV ) );
+      return *this;
+    }
+
+    GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexData = vertexData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexStride = vertexStride_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexFormat = vertexFormat_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexData = indexData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexOffset = indexOffset_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformData = transformData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformOffset = transformOffset_;
+      return *this;
+    }
+
+
+    operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
+    }
+
+    operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryTrianglesNV const& ) const = default;
+#else
+    bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -20369,94 +15796,115 @@
           && ( transformOffset == rhs.transformOffset );
     }
 
-    bool operator!=( GeometryTrianglesNV const& rhs ) const
+    bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eGeometryTrianglesNV;
+
 
   public:
-    const void* pNext = nullptr;
-    Buffer vertexData;
-    DeviceSize vertexOffset;
-    uint32_t vertexCount;
-    DeviceSize vertexStride;
-    Format vertexFormat;
-    Buffer indexData;
-    DeviceSize indexOffset;
-    uint32_t indexCount;
-    IndexType indexType;
-    Buffer transformData;
-    DeviceSize transformOffset;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
+    uint32_t vertexCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Buffer indexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
+    uint32_t indexCount = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::Buffer transformData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
+
   };
   static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
+  {
+    using Type = GeometryTrianglesNV;
+  };
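Sketch of filling GeometryTrianglesNV through the updated setters; the buffer handles, the R32G32B32 vertex format, and uint32 indices are illustrative assumptions.

#include <vulkan/vulkan.hpp>

// Sketch: indexed triangle geometry sourced from existing vertex and index buffers.
vk::GeometryTrianglesNV makeTriangles( vk::Buffer vertexBuffer, uint32_t vertexCount,
                                       vk::DeviceSize vertexStride,
                                       vk::Buffer indexBuffer, uint32_t indexCount )
{
  return vk::GeometryTrianglesNV{}
      .setVertexData( vertexBuffer )
      .setVertexOffset( 0 )
      .setVertexCount( vertexCount )
      .setVertexStride( vertexStride )
      .setVertexFormat( vk::Format::eR32G32B32Sfloat )  // assumed position format
      .setIndexData( indexBuffer )
      .setIndexOffset( 0 )
      .setIndexCount( indexCount )
      .setIndexType( vk::IndexType::eUint32 );
}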
 
   struct GeometryAABBNV
   {
-    GeometryAABBNV( Buffer aabbData_ = Buffer(),
-                    uint32_t numAABBs_ = 0,
-                    uint32_t stride_ = 0,
-                    DeviceSize offset_ = 0 )
-      : aabbData( aabbData_ )
-      , numAABBs( numAABBs_ )
-      , stride( stride_ )
-      , offset( offset_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;
 
-    GeometryAABBNV( VkGeometryAABBNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( GeometryAABBNV ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
+    : aabbData( aabbData_ ), numAABBs( numAABBs_ ), stride( stride_ ), offset( offset_ )
+    {}
 
-    GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs )
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( GeometryAABBNV ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
       return *this;
     }
-    GeometryAABBNV& setPNext( const void* pNext_ )
+
+    GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryAABBNV ) );
+      return *this;
+    }
+
+    GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    GeometryAABBNV& setAabbData( Buffer aabbData_ )
+    GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
     {
       aabbData = aabbData_;
       return *this;
     }
 
-    GeometryAABBNV& setNumAABBs( uint32_t numAABBs_ )
+    GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
     {
       numAABBs = numAABBs_;
       return *this;
     }
 
-    GeometryAABBNV& setStride( uint32_t stride_ )
+    GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
     {
       stride = stride_;
       return *this;
     }
 
-    GeometryAABBNV& setOffset( DeviceSize offset_ )
+    GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
     {
       offset = offset_;
       return *this;
     }
 
-    operator VkGeometryAABBNV const&() const
+
+    operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryAABBNV*>(this);
+      return *reinterpret_cast<const VkGeometryAABBNV*>( this );
     }
 
-    operator VkGeometryAABBNV &()
+    operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryAABBNV*>(this);
+      return *reinterpret_cast<VkGeometryAABBNV*>( this );
     }
 
-    bool operator==( GeometryAABBNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryAABBNV const& ) const = default;
+#else
+    bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -20466,152 +15914,3536 @@
           && ( offset == rhs.offset );
     }
 
-    bool operator!=( GeometryAABBNV const& rhs ) const
+    bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eGeometryAabbNV;
+
 
   public:
-    const void* pNext = nullptr;
-    Buffer aabbData;
-    uint32_t numAABBs;
-    uint32_t stride;
-    DeviceSize offset;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
+    uint32_t numAABBs = {};
+    uint32_t stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+
   };
   static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGeometryAabbNV>
+  {
+    using Type = GeometryAABBNV;
+  };
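Possible usage of the AABB geometry setters; the stride and offset values are placeholders and are expressed in bytes.

#include <vulkan/vulkan.hpp>

// Sketch: AABB geometry sourced from a caller-provided buffer.
vk::GeometryAABBNV makeAabbs( vk::Buffer aabbBuffer, uint32_t count, uint32_t strideInBytes )
{
  return vk::GeometryAABBNV{}
      .setAabbData( aabbBuffer )
      .setNumAABBs( count )
      .setStride( strideInBytes )
      .setOffset( 0 );
}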
 
   struct GeometryDataNV
   {
-    GeometryDataNV( GeometryTrianglesNV triangles_ = GeometryTrianglesNV(),
-                    GeometryAABBNV aabbs_ = GeometryAABBNV() )
-      : triangles( triangles_ )
-      , aabbs( aabbs_ )
-    {
-    }
 
-    GeometryDataNV( VkGeometryDataNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( GeometryDataNV ) );
-    }
 
-    GeometryDataNV& operator=( VkGeometryDataNV const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT
+    : triangles( triangles_ ), aabbs( aabbs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( GeometryDataNV ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
       return *this;
     }
-    GeometryDataNV& setTriangles( GeometryTrianglesNV triangles_ )
+
+    GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryDataNV ) );
+      return *this;
+    }
+
+    GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
     {
       triangles = triangles_;
       return *this;
     }
 
-    GeometryDataNV& setAabbs( GeometryAABBNV aabbs_ )
+    GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
     {
       aabbs = aabbs_;
       return *this;
     }
 
-    operator VkGeometryDataNV const&() const
+
+    operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryDataNV*>(this);
+      return *reinterpret_cast<const VkGeometryDataNV*>( this );
     }
 
-    operator VkGeometryDataNV &()
+    operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryDataNV*>(this);
+      return *reinterpret_cast<VkGeometryDataNV*>( this );
     }
 
-    bool operator==( GeometryDataNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryDataNV const& ) const = default;
+#else
+    bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( triangles == rhs.triangles )
           && ( aabbs == rhs.aabbs );
     }
 
-    bool operator!=( GeometryDataNV const& rhs ) const
+    bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    GeometryTrianglesNV triangles;
-    GeometryAABBNV aabbs;
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
+    VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
+
   };
   static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
 
-  struct BindAccelerationStructureMemoryInfoNV
+  struct GeometryNV
   {
-    BindAccelerationStructureMemoryInfoNV( AccelerationStructureNV accelerationStructure_ = AccelerationStructureNV(),
-                                           DeviceMemory memory_ = DeviceMemory(),
-                                           DeviceSize memoryOffset_ = 0,
-                                           uint32_t deviceIndexCount_ = 0,
-                                           const uint32_t* pDeviceIndices_ = nullptr )
-      : accelerationStructure( accelerationStructure_ )
-      , memory( memory_ )
-      , memoryOffset( memoryOffset_ )
-      , deviceIndexCount( deviceIndexCount_ )
-      , pDeviceIndices( pDeviceIndices_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;
 
-    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindAccelerationStructureMemoryInfoNV ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
+    {}
 
-    BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs )
+    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( BindAccelerationStructureMemoryInfoNV ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
       return *this;
     }
-    BindAccelerationStructureMemoryInfoNV& setPNext( const void* pNext_ )
+
+    GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GeometryNV ) );
+      return *this;
+    }
+
+    GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    BindAccelerationStructureMemoryInfoNV& setAccelerationStructure( AccelerationStructureNV accelerationStructure_ )
+    GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryType = geometryType_;
+      return *this;
+    }
+
+    GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometry = geometry_;
+      return *this;
+    }
+
+    GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryNV*>( this );
+    }
+
+    operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryNV const& ) const = default;
+#else
+    bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( geometryType == rhs.geometryType )
+          && ( geometry == rhs.geometry )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+    VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
+    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
+
+  };
+  static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGeometryNV>
+  {
+    using Type = GeometryNV;
+  };
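GeometryDataNV and GeometryNV compose as sketched below; only the member matching geometryType is consumed, so aabbs can stay value-initialized. Names are illustrative.

#include <vulkan/vulkan.hpp>

// Sketch: wrap a triangle description into a GeometryNV record.
vk::GeometryNV makeTriangleGeometry( vk::GeometryTrianglesNV const & triangles )
{
  vk::GeometryDataNV data;
  data.setTriangles( triangles );   // aabbs remains value-initialized and is ignored here

  return vk::GeometryNV{}
      .setGeometryType( vk::GeometryTypeKHR::eTriangles )
      .setGeometry( data );
}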
+
+  struct AccelerationStructureInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
+    : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( static_cast<uint32_t>( geometries_.size() ) ), pGeometries( geometries_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureInfoNV ) );
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = geometryCount_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGeometries = pGeometries_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    AccelerationStructureInfoNV & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = static_cast<uint32_t>( geometries_.size() );
+      pGeometries = geometries_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureInfoNV const& ) const = default;
+#else
+    bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( instanceCount == rhs.instanceCount )
+          && ( geometryCount == rhs.geometryCount )
+          && ( pGeometries == rhs.pGeometries );
+    }
+
+    bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {};
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
+    uint32_t instanceCount = {};
+    uint32_t geometryCount = {};
+    const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
+  {
+    using Type = AccelerationStructureInfoNV;
+  };
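The enhanced-mode setGeometries overload mirrors setGeometryInfos above. A sketch, assuming the eBottomLevel enumerator from the NV alias of the KHR type enum; the vector must outlive the struct, which only stores a pointer.

#include <vector>
#include <vulkan/vulkan.hpp>

// Sketch: bottom-level build description over a caller-owned geometry list.
vk::AccelerationStructureInfoNV makeBottomLevelInfo(
    std::vector<vk::GeometryNV> const & geometries )
{
  vk::AccelerationStructureInfoNV info;
  info.setType( vk::AccelerationStructureTypeNV::eBottomLevel )  // assumed enumerator
      .setGeometries( geometries );   // sets geometryCount and pGeometries together
  return info;
}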
+
+  struct AccelerationStructureCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}) VULKAN_HPP_NOEXCEPT
+    : compactedSize( compactedSize_ ), info( info_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureCreateInfoNV ) );
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compactedSize = compactedSize_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
+    {
+      info = info_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureCreateInfoNV const& ) const = default;
+#else
+    bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compactedSize == rhs.compactedSize )
+          && ( info == rhs.info );
+    }
+
+    bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
+  {
+    using Type = AccelerationStructureCreateInfoNV;
+  };
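A small follow-on sketch: the NV create info pairs an optional compacted size with the build info; 0 here means no compaction is requested.

#include <vulkan/vulkan.hpp>

// Sketch: wrap a build info into an NV acceleration structure create info.
vk::AccelerationStructureCreateInfoNV makeNvCreateInfo(
    vk::AccelerationStructureInfoNV const & info )
{
  return vk::AccelerationStructureCreateInfoNV{}
      .setCompactedSize( 0 )
      .setInfo( info );
}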
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureDeviceAddressInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
+    : accelerationStructure( accelerationStructure_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureDeviceAddressInfoKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureDeviceAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
     {
       accelerationStructure = accelerationStructure_;
       return *this;
     }
 
-    BindAccelerationStructureMemoryInfoNV& setMemory( DeviceMemory memory_ )
+
+    operator VkAccelerationStructureDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructure == rhs.accelerationStructure );
+    }
+
+    bool operator!=( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureDeviceAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
+  {
+    using Type = AccelerationStructureDeviceAddressInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct TransformMatrixKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array<std::array<float,4>,3> const& matrix_ = {}) VULKAN_HPP_NOEXCEPT
+    : matrix( matrix_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
+      return *this;
+    }
+
+    TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( TransformMatrixKHR ) );
+      return *this;
+    }
+
+    TransformMatrixKHR & setMatrix( std::array<std::array<float,4>,3> matrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      matrix = matrix_;
+      return *this;
+    }
+
+
+    operator VkTransformMatrixKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTransformMatrixKHR*>( this );
+    }
+
+    operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTransformMatrixKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( TransformMatrixKHR const& ) const = default;
+#else
+    bool operator==( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( matrix == rhs.matrix );
+    }
+
+    bool operator!=( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
+
+  };
+  static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
+  using TransformMatrixNV = TransformMatrixKHR;
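The ArrayWrapper2D member still accepts a nested std::array through the constructor shown above; for example, an identity transform (sketch, assuming the default vk namespace).

#include <array>
#include <vulkan/vulkan.hpp>

// Sketch: 3x4 row-major identity transform; the last column carries the translation.
vk::TransformMatrixKHR identityTransform()
{
  std::array<std::array<float, 4>, 3> rows = { {
    { 1.0f, 0.0f, 0.0f, 0.0f },
    { 0.0f, 1.0f, 0.0f, 0.0f },
    { 0.0f, 0.0f, 1.0f, 0.0f }
  } };
  return vk::TransformMatrixKHR( rows );
}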
+
+  struct AccelerationStructureInstanceKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
+    : transform( transform_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureInstanceKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCustomIndex = instanceCustomIndex_;
+      return *this;
+    }
+
+    AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mask = mask_;
+      return *this;
+    }
+
+    AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
+      return *this;
+    }
+
+    AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
+      return *this;
+    }
+
+    AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureReference = accelerationStructureReference_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureInstanceKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureInstanceKHR*>( this );
+    }
+
+    operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureInstanceKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureInstanceKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( transform == rhs.transform )
+          && ( instanceCustomIndex == rhs.instanceCustomIndex )
+          && ( mask == rhs.mask )
+          && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
+          && ( flags == rhs.flags )
+          && ( accelerationStructureReference == rhs.accelerationStructureReference );
+    }
+
+    bool operator!=( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
+    uint32_t instanceCustomIndex : 24;
+    uint32_t mask : 8;
+    uint32_t instanceShaderBindingTableRecordOffset : 24;
+    VkGeometryInstanceFlagsKHR flags : 8;
+    uint64_t accelerationStructureReference = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
+  using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
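+
+  // Illustrative usage (not part of the generated header): populating a top-level
+  // acceleration structure instance with the fluent setters.  `identity` is a
+  // TransformMatrixKHR and `blasDeviceAddress` a hypothetical 64-bit address of a
+  // bottom-level acceleration structure obtained elsewhere:
+  //
+  //   vk::AccelerationStructureInstanceKHR instance = vk::AccelerationStructureInstanceKHR()
+  //     .setTransform( identity )
+  //     .setInstanceCustomIndex( 0 )
+  //     .setMask( 0xFF )
+  //     .setInstanceShaderBindingTableRecordOffset( 0 )
+  //     .setFlags( vk::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
+  //     .setAccelerationStructureReference( blasDeviceAddress );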
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureMemoryRequirementsInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), buildType( buildType_ ), accelerationStructure( accelerationStructure_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureMemoryRequirementsInfoKHR( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureMemoryRequirementsInfoKHR & operator=( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoKHR & operator=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoKHR & setBuildType( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buildType = buildType_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureMemoryRequirementsInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( this );
+    }
+
+    operator VkAccelerationStructureMemoryRequirementsInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureMemoryRequirementsInfoKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( buildType == rhs.buildType )
+          && ( accelerationStructure == rhs.accelerationStructure );
+    }
+
+    bool operator!=( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoKHR>
+  {
+    using Type = AccelerationStructureMemoryRequirementsInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct AccelerationStructureMemoryRequirementsInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), accelerationStructure( accelerationStructure_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) );
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const& ) const = default;
+#else
+    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( accelerationStructure == rhs.accelerationStructure );
+    }
+
+    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
+  {
+    using Type = AccelerationStructureMemoryRequirementsInfoNV;
+  };
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct AccelerationStructureVersionKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR(const uint8_t* versionData_ = {}) VULKAN_HPP_NOEXCEPT
+    : versionData( versionData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureVersionKHR( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AccelerationStructureVersionKHR & operator=( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR const *>( &rhs );
+      return *this;
+    }
+
+    AccelerationStructureVersionKHR & operator=( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AccelerationStructureVersionKHR ) );
+      return *this;
+    }
+
+    AccelerationStructureVersionKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureVersionKHR & setVersionData( const uint8_t* versionData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      versionData = versionData_;
+      return *this;
+    }
+
+
+    operator VkAccelerationStructureVersionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureVersionKHR*>( this );
+    }
+
+    operator VkAccelerationStructureVersionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureVersionKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureVersionKHR const& ) const = default;
+#else
+    bool operator==( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( versionData == rhs.versionData );
+    }
+
+    bool operator!=( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionKHR;
+    const void* pNext = {};
+    const uint8_t* versionData = {};
+
+  };
+  static_assert( sizeof( AccelerationStructureVersionKHR ) == sizeof( VkAccelerationStructureVersionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureVersionKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureVersionKHR>
+  {
+    using Type = AccelerationStructureVersionKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  class SwapchainKHR
+  {
+  public:
+    using CType = VkSwapchainKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT
+      : m_swapchainKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_swapchainKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
+      : m_swapchainKHR( swapchainKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_swapchainKHR = swapchainKHR;
+      return *this;
+    }
+#endif
+
+    SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_swapchainKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainKHR const& ) const = default;
+#else
+    bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR == rhs.m_swapchainKHR;
+    }
+
+    bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR != rhs.m_swapchainKHR;
+    }
+
+    bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR < rhs.m_swapchainKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSwapchainKHR m_swapchainKHR;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSwapchainKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Semaphore
+  {
+  public:
+    using CType = VkSemaphore;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT
+      : m_semaphore(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_semaphore(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT
+      : m_semaphore( semaphore )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT
+    {
+      m_semaphore = semaphore;
+      return *this;
+    }
+#endif
+
+    Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_semaphore = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Semaphore const& ) const = default;
+#else
+    bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore == rhs.m_semaphore;
+    }
+
+    bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore != rhs.m_semaphore;
+    }
+
+    bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore < rhs.m_semaphore;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSemaphore m_semaphore;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSemaphore>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Semaphore;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Semaphore;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Semaphore;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Semaphore>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Fence
+  {
+  public:
+    using CType = VkFence;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT
+      : m_fence(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_fence(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT
+      : m_fence( fence )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT
+    {
+      m_fence = fence;
+      return *this;
+    }
+#endif
+
+    Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_fence = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Fence const& ) const = default;
+#else
+    bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence == rhs.m_fence;
+    }
+
+    bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence != rhs.m_fence;
+    }
+
+    bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence < rhs.m_fence;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFence m_fence;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eFence>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Fence;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFence>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Fence;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Fence;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Fence>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
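+
+  // Illustrative note (not part of the generated header): SwapchainKHR, Semaphore and Fence
+  // are thin handle wrappers around their Vk* counterparts.  A minimal sketch of the
+  // null-handle semantics shared by all handle types:
+  //
+  //   vk::Fence fence;              // default-constructed, holds VK_NULL_HANDLE
+  //   if ( !fence ) { /* not created yet */ }
+  //   fence = nullptr;              // resets the wrapper back to the null handle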
+
+  struct AcquireNextImageInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : swapchain( swapchain_ ), timeout( timeout_ ), semaphore( semaphore_ ), fence( fence_ ), deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AcquireNextImageInfoKHR ) );
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeout = timeout_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+
+    operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
+    }
+
+    operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AcquireNextImageInfoKHR const& ) const = default;
+#else
+    bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( timeout == rhs.timeout )
+          && ( semaphore == rhs.semaphore )
+          && ( fence == rhs.fence )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+    uint64_t timeout = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    uint32_t deviceMask = {};
+
+  };
+  static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
+  {
+    using Type = AcquireNextImageInfoKHR;
+  };
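+
+  // Illustrative usage (not part of the generated header): a sketch of filling
+  // AcquireNextImageInfoKHR for vk::Device::acquireNextImage2KHR, assuming `swapchain` and
+  // `imageAvailableSemaphore` are valid handles created elsewhere:
+  //
+  //   vk::AcquireNextImageInfoKHR acquireInfo = vk::AcquireNextImageInfoKHR()
+  //     .setSwapchain( swapchain )
+  //     .setTimeout( UINT64_MAX )
+  //     .setSemaphore( imageAvailableSemaphore )
+  //     .setFence( {} )               // optional, may stay a null handle
+  //     .setDeviceMask( 1 );          // single-GPU case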
+
+  struct AcquireProfilingLockInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), timeout( timeout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AcquireProfilingLockInfoKHR ) );
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeout = timeout_;
+      return *this;
+    }
+
+
+    operator VkAcquireProfilingLockInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
+    }
+
+    operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AcquireProfilingLockInfoKHR const& ) const = default;
+#else
+    bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( timeout == rhs.timeout );
+    }
+
+    bool operator!=( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
+    uint64_t timeout = {};
+
+  };
+  static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
+  {
+    using Type = AcquireProfilingLockInfoKHR;
+  };
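+
+  // Illustrative usage (not part of the generated header): AcquireProfilingLockInfoKHR is
+  // consumed by vk::Device::acquireProfilingLockKHR (VK_KHR_performance_query).  A minimal
+  // sketch that waits indefinitely for the lock:
+  //
+  //   vk::AcquireProfilingLockInfoKHR lockInfo = vk::AcquireProfilingLockInfoKHR()
+  //     .setFlags( {} )
+  //     .setTimeout( UINT64_MAX );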
+
+  struct AllocationCallbacks
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AllocationCallbacks(void* pUserData_ = {}, PFN_vkAllocationFunction pfnAllocation_ = {}, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT
+    : pUserData( pUserData_ ), pfnAllocation( pfnAllocation_ ), pfnReallocation( pfnReallocation_ ), pfnFree( pfnFree_ ), pfnInternalAllocation( pfnInternalAllocation_ ), pfnInternalFree( pfnInternalFree_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
+      return *this;
+    }
+
+    AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AllocationCallbacks ) );
+      return *this;
+    }
+
+    AllocationCallbacks & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnAllocation = pfnAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnReallocation = pfnReallocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnFree = pfnFree_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalAllocation = pfnInternalAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalFree = pfnInternalFree_;
+      return *this;
+    }
+
+
+    operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAllocationCallbacks*>( this );
+    }
+
+    operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAllocationCallbacks*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AllocationCallbacks const& ) const = default;
+#else
+    bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( pUserData == rhs.pUserData )
+          && ( pfnAllocation == rhs.pfnAllocation )
+          && ( pfnReallocation == rhs.pfnReallocation )
+          && ( pfnFree == rhs.pfnFree )
+          && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
+          && ( pfnInternalFree == rhs.pfnInternalFree );
+    }
+
+    bool operator!=( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    void* pUserData = {};
+    PFN_vkAllocationFunction pfnAllocation = {};
+    PFN_vkReallocationFunction pfnReallocation = {};
+    PFN_vkFreeFunction pfnFree = {};
+    PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
+    PFN_vkInternalFreeNotification pfnInternalFree = {};
+
+  };
+  static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
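+
+  // Illustrative usage (not part of the generated header): installing host allocation
+  // callbacks.  `myAlloc`, `myRealloc`, `myFree` and `allocatorState` are hypothetical
+  // functions/objects whose signatures must match PFN_vkAllocationFunction,
+  // PFN_vkReallocationFunction and PFN_vkFreeFunction respectively:
+  //
+  //   vk::AllocationCallbacks callbacks = vk::AllocationCallbacks()
+  //     .setPUserData( &allocatorState )
+  //     .setPfnAllocation( &myAlloc )
+  //     .setPfnReallocation( &myRealloc )
+  //     .setPfnFree( &myFree );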
+
+  struct ComponentMapping
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT
+    : r( r_ ), g( g_ ), b( b_ ), a( a_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
+      return *this;
+    }
+
+    ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ComponentMapping ) );
+      return *this;
+    }
+
+    ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
+    {
+      r = r_;
+      return *this;
+    }
+
+    ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
+    {
+      g = g_;
+      return *this;
+    }
+
+    ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b = b_;
+      return *this;
+    }
+
+    ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
+    {
+      a = a_;
+      return *this;
+    }
+
+
+    operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComponentMapping*>( this );
+    }
+
+    operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComponentMapping*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ComponentMapping const& ) const = default;
+#else
+    bool operator==( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( r == rhs.r )
+          && ( g == rhs.g )
+          && ( b == rhs.b )
+          && ( a == rhs.a );
+    }
+
+    bool operator!=( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+
+  };
+  static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ComponentMapping>::value, "struct wrapper is not a standard layout!" );
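+
+  // Illustrative usage (not part of the generated header): a default-constructed
+  // ComponentMapping is the identity mapping; an explicit swizzle can be built with the
+  // fluent setters, e.g. to swap the red and blue channels of an image view:
+  //
+  //   vk::ComponentMapping swizzleRB = vk::ComponentMapping()
+  //     .setR( vk::ComponentSwizzle::eB )
+  //     .setG( vk::ComponentSwizzle::eG )
+  //     .setB( vk::ComponentSwizzle::eR )
+  //     .setA( vk::ComponentSwizzle::eA );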
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidHardwareBufferFormatPropertiesANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven) VULKAN_HPP_NOEXCEPT
+    : format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
+      return *this;
+    }
+
+    AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) );
+      return *this;
+    }
+
+
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const& ) const = default;
+#else
+    bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( externalFormat == rhs.externalFormat )
+          && ( formatFeatures == rhs.formatFeatures )
+          && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
+          && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
+          && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
+          && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
+          && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+    }
+
+    bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint64_t externalFormat = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+
+  };
+  static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
+  {
+    using Type = AndroidHardwareBufferFormatPropertiesANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidHardwareBufferPropertiesANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : allocationSize( allocationSize_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
+      return *this;
+    }
+
+    AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferPropertiesANDROID ) );
+      return *this;
+    }
+
+
+    operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidHardwareBufferPropertiesANDROID const& ) const = default;
+#else
+    bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allocationSize == rhs.allocationSize )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+  static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
+  {
+    using Type = AndroidHardwareBufferPropertiesANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidHardwareBufferUsageANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}) VULKAN_HPP_NOEXCEPT
+    : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
+      return *this;
+    }
+
+    AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidHardwareBufferUsageANDROID ) );
+      return *this;
+    }
+
+
+    operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidHardwareBufferUsageANDROID const& ) const = default;
+#else
+    bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
+    }
+
+    bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
+    void* pNext = {};
+    uint64_t androidHardwareBufferUsage = {};
+
+  };
+  static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
+  {
+    using Type = AndroidHardwareBufferUsageANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidSurfaceCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow* window_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow* window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+
+    operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidSurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
+    struct ANativeWindow* window = {};
+
+  };
+  static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
+  {
+    using Type = AndroidSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
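+
+  // Illustrative usage (not part of the generated header; only applicable when
+  // VK_USE_PLATFORM_ANDROID_KHR is defined): creating a surface from a hypothetical
+  // ANativeWindow* `nativeWindow`:
+  //
+  //   vk::AndroidSurfaceCreateInfoKHR surfaceInfo = vk::AndroidSurfaceCreateInfoKHR()
+  //     .setFlags( {} )
+  //     .setWindow( nativeWindow );
+  //   vk::SurfaceKHR surface = instance.createAndroidSurfaceKHR( surfaceInfo );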
+
+  struct ApplicationInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ApplicationInfo(const char* pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char* pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}) VULKAN_HPP_NOEXCEPT
+    : pApplicationName( pApplicationName_ ), applicationVersion( applicationVersion_ ), pEngineName( pEngineName_ ), engineVersion( engineVersion_ ), apiVersion( apiVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
+      return *this;
+    }
+
+    ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ApplicationInfo ) );
+      return *this;
+    }
+
+    ApplicationInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ApplicationInfo & setPApplicationName( const char* pApplicationName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pApplicationName = pApplicationName_;
+      return *this;
+    }
+
+    ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      applicationVersion = applicationVersion_;
+      return *this;
+    }
+
+    ApplicationInfo & setPEngineName( const char* pEngineName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEngineName = pEngineName_;
+      return *this;
+    }
+
+    ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      engineVersion = engineVersion_;
+      return *this;
+    }
+
+    ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      apiVersion = apiVersion_;
+      return *this;
+    }
+
+
+    operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkApplicationInfo*>( this );
+    }
+
+    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkApplicationInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ApplicationInfo const& ) const = default;
+#else
+    bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pApplicationName == rhs.pApplicationName )
+          && ( applicationVersion == rhs.applicationVersion )
+          && ( pEngineName == rhs.pEngineName )
+          && ( engineVersion == rhs.engineVersion )
+          && ( apiVersion == rhs.apiVersion );
+    }
+
+    bool operator!=( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
+    const void* pNext = {};
+    const char* pApplicationName = {};
+    uint32_t applicationVersion = {};
+    const char* pEngineName = {};
+    uint32_t engineVersion = {};
+    uint32_t apiVersion = {};
+
+  };
+  static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eApplicationInfo>
+  {
+    using Type = ApplicationInfo;
+  };
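+
+  // Illustrative usage (not part of the generated header): filling ApplicationInfo for an
+  // InstanceCreateInfo, using the standard version macros from vulkan_core.h.  The name
+  // and version values are placeholders:
+  //
+  //   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
+  //     .setPApplicationName( "MyApp" )
+  //     .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+  //     .setPEngineName( "MyEngine" )
+  //     .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+  //     .setApiVersion( VK_API_VERSION_1_2 );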
+
+  struct AttachmentDescription
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentDescription ) );
+      return *this;
+    }
+
+    AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLoadOp = stencilLoadOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilStoreOp = stencilStoreOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+
+
+    operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescription*>( this );
+    }
+
+    operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentDescription const& ) const = default;
+#else
+    bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+    }
+
+    bool operator!=( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+  static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
+
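// Sketch of a typical single-sampled color attachment built with the setters above; the
// format and layout values are illustrative only (any valid vk::Format / vk::ImageLayout works):
//
//   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
//       .setFormat( vk::Format::eB8G8R8A8Unorm )
//       .setSamples( vk::SampleCountFlagBits::e1 )
//       .setLoadOp( vk::AttachmentLoadOp::eClear )
//       .setStoreOp( vk::AttachmentStoreOp::eStore )
//       .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
//       .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
//       .setInitialLayout( vk::ImageLayout::eUndefined )
//       .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );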
+  struct AttachmentDescription2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentDescription2 ) );
+      return *this;
+    }
+
+    AttachmentDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLoadOp = stencilLoadOp_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilStoreOp = stencilStoreOp_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+
+
+    operator VkAttachmentDescription2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescription2*>( this );
+    }
+
+    operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentDescription2 const& ) const = default;
+#else
+    bool operator==( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+    }
+
+    bool operator!=( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+  static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentDescription2>
+  {
+    using Type = AttachmentDescription2;
+  };
+  using AttachmentDescription2KHR = AttachmentDescription2;
+
+  struct AttachmentDescriptionStencilLayout
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : stencilInitialLayout( stencilInitialLayout_ ), stencilFinalLayout( stencilFinalLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentDescriptionStencilLayout ) );
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilInitialLayout = stencilInitialLayout_;
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayout & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilFinalLayout = stencilFinalLayout_;
+      return *this;
+    }
+
+
+    operator VkAttachmentDescriptionStencilLayout const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
+    }
+
+    operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentDescriptionStencilLayout const& ) const = default;
+#else
+    bool operator==( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilInitialLayout == rhs.stencilInitialLayout )
+          && ( stencilFinalLayout == rhs.stencilFinalLayout );
+    }
+
+    bool operator!=( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+  static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
+  {
+    using Type = AttachmentDescriptionStencilLayout;
+  };
+  using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
+
+  struct AttachmentReference
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : attachment( attachment_ ), layout( layout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentReference ) );
+      return *this;
+    }
+
+    AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+
+    operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference*>( this );
+    }
+
+    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentReference const& ) const = default;
+#else
+    bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( attachment == rhs.attachment )
+          && ( layout == rhs.layout );
+    }
+
+    bool operator!=( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+  static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!" );
+
+  struct AttachmentReference2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : attachment( attachment_ ), layout( layout_ ), aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentReference2 ) );
+      return *this;
+    }
+
+    AttachmentReference2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+
+    operator VkAttachmentReference2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference2*>( this );
+    }
+
+    operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentReference2 const& ) const = default;
+#else
+    bool operator==( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachment == rhs.attachment )
+          && ( layout == rhs.layout )
+          && ( aspectMask == rhs.aspectMask );
+    }
+
+    bool operator!=( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
+    const void* pNext = {};
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+
+  };
+  static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentReference2>
+  {
+    using Type = AttachmentReference2;
+  };
+  using AttachmentReference2KHR = AttachmentReference2;
+
+  struct AttachmentReferenceStencilLayout
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : stencilLayout( stencilLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentReferenceStencilLayout ) );
+      return *this;
+    }
+
+    AttachmentReferenceStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLayout = stencilLayout_;
+      return *this;
+    }
+
+
+    operator VkAttachmentReferenceStencilLayout const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
+    }
+
+    operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentReferenceStencilLayout const& ) const = default;
+#else
+    bool operator==( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilLayout == rhs.stencilLayout );
+    }
+
+    bool operator!=( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+  static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
+  {
+    using Type = AttachmentReferenceStencilLayout;
+  };
+  using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
+
+  struct Extent2D
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT
+    : width( width_ ), height( height_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
+      return *this;
+    }
+
+    Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Extent2D ) );
+      return *this;
+    }
+
+    Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+
+    operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent2D*>( this );
+    }
+
+    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent2D*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Extent2D const& ) const = default;
+#else
+    bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height );
+    }
+
+    bool operator!=( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+
+  };
+  static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!" );
+
+  struct SampleLocationEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
+      return *this;
+    }
+
+    SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SampleLocationEXT ) );
+      return *this;
+    }
+
+    SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+
+    operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationEXT*>( this );
+    }
+
+    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SampleLocationEXT const& ) const = default;
+#else
+    bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    float x = {};
+    float y = {};
+
+  };
+  static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SampleLocationsInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+    : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( sampleLocationsCount_ ), pSampleLocations( pSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ )
+    : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SampleLocationsInfoEXT ) );
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsPerPixel = sampleLocationsPerPixel_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationGridSize = sampleLocationGridSize_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsCount = sampleLocationsCount_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleLocations = pSampleLocations_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SampleLocationsInfoEXT & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
+      pSampleLocations = sampleLocations_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
+    }
+
+    operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SampleLocationsInfoEXT const& ) const = default;
+#else
+    bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
+          && ( sampleLocationGridSize == rhs.sampleLocationGridSize )
+          && ( sampleLocationsCount == rhs.sampleLocationsCount )
+          && ( pSampleLocations == rhs.pSampleLocations );
+    }
+
+    bool operator!=( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
+    uint32_t sampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {};
+
+  };
+  static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
+  {
+    using Type = SampleLocationsInfoEXT;
+  };
+
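// Sketch of the enhanced-mode ArrayProxy setter above, which derives sampleLocationsCount
// and pSampleLocations from a single container argument (assumes enhanced mode is enabled
// and the default `vk` namespace):
//
//   std::array<vk::SampleLocationEXT, 2> locations = { vk::SampleLocationEXT( 0.25f, 0.25f ),
//                                                      vk::SampleLocationEXT( 0.75f, 0.75f ) };
//   vk::SampleLocationsInfoEXT info;
//   info.setSampleLocationsPerPixel( vk::SampleCountFlagBits::e2 )
//       .setSampleLocationGridSize( vk::Extent2D( 1, 1 ) )
//       .setSampleLocations( locations );  // fills both the count and the pointer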
+  struct AttachmentSampleLocationsEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : attachmentIndex( attachmentIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
+      return *this;
+    }
+
+    AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( AttachmentSampleLocationsEXT ) );
+      return *this;
+    }
+
+    AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentIndex = attachmentIndex_;
+      return *this;
+    }
+
+    AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+
+
+    operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
+    }
+
+    operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentSampleLocationsEXT const& ) const = default;
+#else
+    bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( attachmentIndex == rhs.attachmentIndex )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+    }
+
+    bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t attachmentIndex = {};
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
+  };
+  static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct BaseInStructure
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
+    : sType( sType_ )
+    {}
+
+    BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
+      return *this;
+    }
+
+    BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BaseInStructure ) );
+      return *this;
+    }
+
+    BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+
+    operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBaseInStructure*>( this );
+    }
+
+    operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBaseInStructure*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BaseInStructure const& ) const = default;
+#else
+    bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+    }
+
+    bool operator!=( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
+    const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {};
+
+  };
+  static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BaseInStructure>::value, "struct wrapper is not a standard layout!" );
+
+  struct BaseOutStructure
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
+    : sType( sType_ )
+    {}
+
+    BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
+      return *this;
+    }
+
+    BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BaseOutStructure ) );
+      return *this;
+    }
+
+    BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+
+    operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBaseOutStructure*>( this );
+    }
+
+    operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBaseOutStructure*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BaseOutStructure const& ) const = default;
+#else
+    bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+    }
+
+    bool operator!=( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
+    struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {};
+
+  };
+  static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
+
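// BaseOutStructure is the generic node type for walking a writable pNext chain; a sketch,
// where `features2` stands in for any extensible structure (hypothetical variable):
//
//   for ( auto * node = reinterpret_cast<vk::BaseOutStructure *>( features2.pNext );
//         node != nullptr; node = node->pNext )
//   {
//     // node->sType identifies each extension structure in the chain
//   }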
+  class DeviceMemory
+  {
+  public:
+    using CType = VkDeviceMemory;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory( deviceMemory )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = deviceMemory;
+      return *this;
+    }
+#endif
+
+    DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceMemory const& ) const = default;
+#else
+    bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDeviceMemory>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct BindAccelerationStructureMemoryInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
+    : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindAccelerationStructureMemoryInfoKHR( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindAccelerationStructureMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
+    : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindAccelerationStructureMemoryInfoKHR & operator=( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoKHR & operator=( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindAccelerationStructureMemoryInfoKHR ) );
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
     {
       memory = memory_;
       return *this;
     }
 
-    BindAccelerationStructureMemoryInfoNV& setMemoryOffset( DeviceSize memoryOffset_ )
+    BindAccelerationStructureMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
     {
       memoryOffset = memoryOffset_;
       return *this;
     }
 
-    BindAccelerationStructureMemoryInfoNV& setDeviceIndexCount( uint32_t deviceIndexCount_ )
+    BindAccelerationStructureMemoryInfoKHR & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
     {
       deviceIndexCount = deviceIndexCount_;
       return *this;
     }
 
-    BindAccelerationStructureMemoryInfoNV& setPDeviceIndices( const uint32_t* pDeviceIndices_ )
+    BindAccelerationStructureMemoryInfoKHR & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
     {
       pDeviceIndices = pDeviceIndices_;
       return *this;
     }
 
-    operator VkBindAccelerationStructureMemoryInfoNV const&() const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindAccelerationStructureMemoryInfoKHR & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>(this);
+      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+      pDeviceIndices = deviceIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkBindAccelerationStructureMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( this );
     }
 
-    operator VkBindAccelerationStructureMemoryInfoNV &()
+    operator VkBindAccelerationStructureMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>(this);
+      return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoKHR*>( this );
     }
 
-    bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindAccelerationStructureMemoryInfoKHR const& ) const = default;
+#else
+    bool operator==( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -20622,5596 +19454,1159 @@
           && ( pDeviceIndices == rhs.pDeviceIndices );
     }
 
-    bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const
+    bool operator!=( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+
 
   public:
-    const void* pNext = nullptr;
-    AccelerationStructureNV accelerationStructure;
-    DeviceMemory memory;
-    DeviceSize memoryOffset;
-    uint32_t deviceIndexCount;
-    const uint32_t* pDeviceIndices;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    uint32_t deviceIndexCount = {};
+    const uint32_t* pDeviceIndices = {};
+
   };
-  static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( BindAccelerationStructureMemoryInfoKHR ) == sizeof( VkBindAccelerationStructureMemoryInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
-  struct WriteDescriptorSetAccelerationStructureNV
+  template <>
+  struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoKHR>
   {
-    WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = 0,
-                                               const AccelerationStructureNV* pAccelerationStructures_ = nullptr )
-      : accelerationStructureCount( accelerationStructureCount_ )
-      , pAccelerationStructures( pAccelerationStructures_ )
+    using Type = BindAccelerationStructureMemoryInfoKHR;
+  };
+  using BindAccelerationStructureMemoryInfoNV = BindAccelerationStructureMemoryInfoKHR;
+
+  struct BindBufferMemoryDeviceGroupInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( WriteDescriptorSetAccelerationStructureNV ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
+    : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs )
+    BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( WriteDescriptorSetAccelerationStructureNV ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
       return *this;
     }
-    WriteDescriptorSetAccelerationStructureNV& setPNext( const void* pNext_ )
+
+    BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) );
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    WriteDescriptorSetAccelerationStructureNV& setAccelerationStructureCount( uint32_t accelerationStructureCount_ )
+    BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      accelerationStructureCount = accelerationStructureCount_;
+      deviceIndexCount = deviceIndexCount_;
       return *this;
     }
 
-    WriteDescriptorSetAccelerationStructureNV& setPAccelerationStructures( const AccelerationStructureNV* pAccelerationStructures_ )
+    BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
     {
-      pAccelerationStructures = pAccelerationStructures_;
+      pDeviceIndices = pDeviceIndices_;
       return *this;
     }
 
-    operator VkWriteDescriptorSetAccelerationStructureNV const&() const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindBufferMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>(this);
+      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+      pDeviceIndices = deviceIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
     }
 
-    operator VkWriteDescriptorSetAccelerationStructureNV &()
+    operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>(this);
+      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
     }
 
-    bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default;
+#else
+    bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( accelerationStructureCount == rhs.accelerationStructureCount )
-          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices );
     }
 
-    bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const
+    bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+
 
   public:
-    const void* pNext = nullptr;
-    uint32_t accelerationStructureCount;
-    const AccelerationStructureNV* pAccelerationStructures;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+    const void* pNext = {};
+    uint32_t deviceIndexCount = {};
+    const uint32_t* pDeviceIndices = {};
+
   };
-  static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct PhysicalDeviceRayTracingPropertiesNV
+  template <>
+  struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
   {
-    PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = 0,
-                                          uint32_t maxRecursionDepth_ = 0,
-                                          uint32_t maxShaderGroupStride_ = 0,
-                                          uint32_t shaderGroupBaseAlignment_ = 0,
-                                          uint64_t maxGeometryCount_ = 0,
-                                          uint64_t maxInstanceCount_ = 0,
-                                          uint64_t maxTriangleCount_ = 0,
-                                          uint32_t maxDescriptorSetAccelerationStructures_ = 0 )
-      : shaderGroupHandleSize( shaderGroupHandleSize_ )
-      , maxRecursionDepth( maxRecursionDepth_ )
-      , maxShaderGroupStride( maxShaderGroupStride_ )
-      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
-      , maxGeometryCount( maxGeometryCount_ )
-      , maxInstanceCount( maxInstanceCount_ )
-      , maxTriangleCount( maxTriangleCount_ )
-      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
-    {
-    }
+    using Type = BindBufferMemoryDeviceGroupInfo;
+  };
+  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
 
-    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceRayTracingPropertiesNV ) );
-    }
+  struct BindBufferMemoryInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
 
-    PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceRayTracingPropertiesNV ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
       return *this;
     }
-    PhysicalDeviceRayTracingPropertiesNV& setPNext( void* pNext_ )
+
+    BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindBufferMemoryInfo ) );
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceRayTracingPropertiesNV& setShaderGroupHandleSize( uint32_t shaderGroupHandleSize_ )
-    {
-      shaderGroupHandleSize = shaderGroupHandleSize_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& setMaxRecursionDepth( uint32_t maxRecursionDepth_ )
-    {
-      maxRecursionDepth = maxRecursionDepth_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& setMaxShaderGroupStride( uint32_t maxShaderGroupStride_ )
-    {
-      maxShaderGroupStride = maxShaderGroupStride_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& setShaderGroupBaseAlignment( uint32_t shaderGroupBaseAlignment_ )
-    {
-      shaderGroupBaseAlignment = shaderGroupBaseAlignment_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& setMaxGeometryCount( uint64_t maxGeometryCount_ )
-    {
-      maxGeometryCount = maxGeometryCount_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& setMaxInstanceCount( uint64_t maxInstanceCount_ )
-    {
-      maxInstanceCount = maxInstanceCount_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& setMaxTriangleCount( uint64_t maxTriangleCount_ )
-    {
-      maxTriangleCount = maxTriangleCount_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& setMaxDescriptorSetAccelerationStructures( uint32_t maxDescriptorSetAccelerationStructures_ )
-    {
-      maxDescriptorSetAccelerationStructures = maxDescriptorSetAccelerationStructures_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceRayTracingPropertiesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>(this);
-    }
-
-    operator VkPhysicalDeviceRayTracingPropertiesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>(this);
-    }
-
-    bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
-          && ( maxRecursionDepth == rhs.maxRecursionDepth )
-          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
-          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
-          && ( maxGeometryCount == rhs.maxGeometryCount )
-          && ( maxInstanceCount == rhs.maxInstanceCount )
-          && ( maxTriangleCount == rhs.maxTriangleCount )
-          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
-    }
-
-    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t shaderGroupHandleSize;
-    uint32_t maxRecursionDepth;
-    uint32_t maxShaderGroupStride;
-    uint32_t shaderGroupBaseAlignment;
-    uint64_t maxGeometryCount;
-    uint64_t maxInstanceCount;
-    uint64_t maxTriangleCount;
-    uint32_t maxDescriptorSetAccelerationStructures;
-  };
-  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
-  {
-    PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = 0,
-                                                 SharingMode sharingMode_ = SharingMode::eExclusive,
-                                                 uint32_t queueFamilyIndexCount_ = 0,
-                                                 const uint32_t* pQueueFamilyIndices_ = nullptr )
-      : drmFormatModifier( drmFormatModifier_ )
-      , sharingMode( sharingMode_ )
-      , queueFamilyIndexCount( queueFamilyIndexCount_ )
-      , pQueueFamilyIndices( pQueueFamilyIndices_ )
-    {
-    }
-
-    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) );
-    }
-
-    PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) );
-      return *this;
-    }
-    PhysicalDeviceImageDrmFormatModifierInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceImageDrmFormatModifierInfoEXT& setDrmFormatModifier( uint64_t drmFormatModifier_ )
-    {
-      drmFormatModifier = drmFormatModifier_;
-      return *this;
-    }
-
-    PhysicalDeviceImageDrmFormatModifierInfoEXT& setSharingMode( SharingMode sharingMode_ )
-    {
-      sharingMode = sharingMode_;
-      return *this;
-    }
-
-    PhysicalDeviceImageDrmFormatModifierInfoEXT& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
-    {
-      queueFamilyIndexCount = queueFamilyIndexCount_;
-      return *this;
-    }
-
-    PhysicalDeviceImageDrmFormatModifierInfoEXT& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
-    {
-      pQueueFamilyIndices = pQueueFamilyIndices_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( drmFormatModifier == rhs.drmFormatModifier )
-          && ( sharingMode == rhs.sharingMode )
-          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
-          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
-    }
-
-    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint64_t drmFormatModifier;
-    SharingMode sharingMode;
-    uint32_t queueFamilyIndexCount;
-    const uint32_t* pQueueFamilyIndices;
-  };
-  static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
-
-  struct ImageDrmFormatModifierListCreateInfoEXT
-  {
-    ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = 0,
-                                             const uint64_t* pDrmFormatModifiers_ = nullptr )
-      : drmFormatModifierCount( drmFormatModifierCount_ )
-      , pDrmFormatModifiers( pDrmFormatModifiers_ )
-    {
-    }
-
-    ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) );
-    }
-
-    ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) );
-      return *this;
-    }
-    ImageDrmFormatModifierListCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageDrmFormatModifierListCreateInfoEXT& setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ )
-    {
-      drmFormatModifierCount = drmFormatModifierCount_;
-      return *this;
-    }
-
-    ImageDrmFormatModifierListCreateInfoEXT& setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ )
-    {
-      pDrmFormatModifiers = pDrmFormatModifiers_;
-      return *this;
-    }
-
-    operator VkImageDrmFormatModifierListCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>(this);
-    }
-
-    operator VkImageDrmFormatModifierListCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>(this);
-    }
-
-    bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
-          && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
-    }
-
-    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t drmFormatModifierCount;
-    const uint64_t* pDrmFormatModifiers;
-  };
-  static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct ImageDrmFormatModifierExplicitCreateInfoEXT
-  {
-    ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = 0,
-                                                 uint32_t drmFormatModifierPlaneCount_ = 0,
-                                                 const SubresourceLayout* pPlaneLayouts_ = nullptr )
-      : drmFormatModifier( drmFormatModifier_ )
-      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
-      , pPlaneLayouts( pPlaneLayouts_ )
-    {
-    }
-
-    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) );
-    }
-
-    ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) );
-      return *this;
-    }
-    ImageDrmFormatModifierExplicitCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageDrmFormatModifierExplicitCreateInfoEXT& setDrmFormatModifier( uint64_t drmFormatModifier_ )
-    {
-      drmFormatModifier = drmFormatModifier_;
-      return *this;
-    }
-
-    ImageDrmFormatModifierExplicitCreateInfoEXT& setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ )
-    {
-      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
-      return *this;
-    }
-
-    ImageDrmFormatModifierExplicitCreateInfoEXT& setPPlaneLayouts( const SubresourceLayout* pPlaneLayouts_ )
-    {
-      pPlaneLayouts = pPlaneLayouts_;
-      return *this;
-    }
-
-    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>(this);
-    }
-
-    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>(this);
-    }
-
-    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( drmFormatModifier == rhs.drmFormatModifier )
-          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
-          && ( pPlaneLayouts == rhs.pPlaneLayouts );
-    }
-
-    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint64_t drmFormatModifier;
-    uint32_t drmFormatModifierPlaneCount;
-    const SubresourceLayout* pPlaneLayouts;
-  };
-  static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct ImageDrmFormatModifierPropertiesEXT
-  {
-    operator VkImageDrmFormatModifierPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>(this);
-    }
-
-    operator VkImageDrmFormatModifierPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>(this);
-    }
-
-    bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( drmFormatModifier == rhs.drmFormatModifier );
-    }
-
-    bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint64_t drmFormatModifier;
-  };
-  static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
-
-  enum class SubpassContents
-  {
-    eInline = VK_SUBPASS_CONTENTS_INLINE,
-    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
-  };
-
-  struct SubpassBeginInfoKHR
-  {
-    SubpassBeginInfoKHR( SubpassContents contents_ = SubpassContents::eInline )
-      : contents( contents_ )
-    {
-    }
-
-    SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassBeginInfoKHR ) );
-    }
-
-    SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassBeginInfoKHR ) );
-      return *this;
-    }
-    SubpassBeginInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SubpassBeginInfoKHR& setContents( SubpassContents contents_ )
-    {
-      contents = contents_;
-      return *this;
-    }
-
-    operator VkSubpassBeginInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkSubpassBeginInfoKHR*>(this);
-    }
-
-    operator VkSubpassBeginInfoKHR &()
-    {
-      return *reinterpret_cast<VkSubpassBeginInfoKHR*>(this);
-    }
-
-    bool operator==( SubpassBeginInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( contents == rhs.contents );
-    }
-
-    bool operator!=( SubpassBeginInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSubpassBeginInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    SubpassContents contents;
-  };
-  static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "struct and wrapper have different size!" );
-
-  struct PresentInfoKHR
-  {
-    PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0,
-                    const Semaphore* pWaitSemaphores_ = nullptr,
-                    uint32_t swapchainCount_ = 0,
-                    const SwapchainKHR* pSwapchains_ = nullptr,
-                    const uint32_t* pImageIndices_ = nullptr,
-                    Result* pResults_ = nullptr )
-      : waitSemaphoreCount( waitSemaphoreCount_ )
-      , pWaitSemaphores( pWaitSemaphores_ )
-      , swapchainCount( swapchainCount_ )
-      , pSwapchains( pSwapchains_ )
-      , pImageIndices( pImageIndices_ )
-      , pResults( pResults_ )
-    {
-    }
-
-    PresentInfoKHR( VkPresentInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentInfoKHR ) );
-    }
-
-    PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PresentInfoKHR ) );
-      return *this;
-    }
-    PresentInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
-    {
-      waitSemaphoreCount = waitSemaphoreCount_;
-      return *this;
-    }
-
-    PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
-    {
-      pWaitSemaphores = pWaitSemaphores_;
-      return *this;
-    }
-
-    PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
-    {
-      swapchainCount = swapchainCount_;
-      return *this;
-    }
-
-    PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
-    {
-      pSwapchains = pSwapchains_;
-      return *this;
-    }
-
-    PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
-    {
-      pImageIndices = pImageIndices_;
-      return *this;
-    }
-
-    PresentInfoKHR& setPResults( Result* pResults_ )
-    {
-      pResults = pResults_;
-      return *this;
-    }
-
-    operator VkPresentInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkPresentInfoKHR*>(this);
-    }
-
-    operator VkPresentInfoKHR &()
-    {
-      return *reinterpret_cast<VkPresentInfoKHR*>(this);
-    }
-
-    bool operator==( PresentInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
-          && ( pWaitSemaphores == rhs.pWaitSemaphores )
-          && ( swapchainCount == rhs.swapchainCount )
-          && ( pSwapchains == rhs.pSwapchains )
-          && ( pImageIndices == rhs.pImageIndices )
-          && ( pResults == rhs.pResults );
-    }
-
-    bool operator!=( PresentInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePresentInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t waitSemaphoreCount;
-    const Semaphore* pWaitSemaphores;
-    uint32_t swapchainCount;
-    const SwapchainKHR* pSwapchains;
-    const uint32_t* pImageIndices;
-    Result* pResults;
-  };
-  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
-
-  enum class DynamicState
-  {
-    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
-    eScissor = VK_DYNAMIC_STATE_SCISSOR,
-    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
-    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
-    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
-    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
-    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
-    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
-    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
-    eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
-    eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
-    eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
-    eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
-    eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
-    eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV
-  };
-
-  struct PipelineDynamicStateCreateInfo
-  {
-    PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(),
-                                    uint32_t dynamicStateCount_ = 0,
-                                    const DynamicState* pDynamicStates_ = nullptr )
-      : flags( flags_ )
-      , dynamicStateCount( dynamicStateCount_ )
-      , pDynamicStates( pDynamicStates_ )
-    {
-    }
-
-    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
-    }
-
-    PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
-      return *this;
-    }
-    PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
-    {
-      dynamicStateCount = dynamicStateCount_;
-      return *this;
-    }
-
-    PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
-    {
-      pDynamicStates = pDynamicStates_;
-      return *this;
-    }
-
-    operator VkPipelineDynamicStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineDynamicStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( dynamicStateCount == rhs.dynamicStateCount )
-          && ( pDynamicStates == rhs.pDynamicStates );
-    }
-
-    bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineDynamicStateCreateFlags flags;
-    uint32_t dynamicStateCount;
-    const DynamicState* pDynamicStates;
-  };
-  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
-
-  enum class DescriptorUpdateTemplateType
-  {
-    eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
-    eDescriptorSetKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
-    ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
-  };
-
-  struct DescriptorUpdateTemplateCreateInfo
-  {
-    DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags flags_ = DescriptorUpdateTemplateCreateFlags(),
-                                        uint32_t descriptorUpdateEntryCount_ = 0,
-                                        const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr,
-                                        DescriptorUpdateTemplateType templateType_ = DescriptorUpdateTemplateType::eDescriptorSet,
-                                        DescriptorSetLayout descriptorSetLayout_ = DescriptorSetLayout(),
-                                        PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics,
-                                        PipelineLayout pipelineLayout_ = PipelineLayout(),
-                                        uint32_t set_ = 0 )
-      : flags( flags_ )
-      , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
-      , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
-      , templateType( templateType_ )
-      , descriptorSetLayout( descriptorSetLayout_ )
-      , pipelineBindPoint( pipelineBindPoint_ )
-      , pipelineLayout( pipelineLayout_ )
-      , set( set_ )
-    {
-    }
-
-    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) );
-    }
-
-    DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) );
-      return *this;
-    }
-    DescriptorUpdateTemplateCreateInfo& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setFlags( DescriptorUpdateTemplateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ )
-    {
-      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ )
-    {
-      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setTemplateType( DescriptorUpdateTemplateType templateType_ )
-    {
-      templateType = templateType_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ )
-    {
-      descriptorSetLayout = descriptorSetLayout_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
-    {
-      pipelineBindPoint = pipelineBindPoint_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setPipelineLayout( PipelineLayout pipelineLayout_ )
-    {
-      pipelineLayout = pipelineLayout_;
-      return *this;
-    }
-
-    DescriptorUpdateTemplateCreateInfo& setSet( uint32_t set_ )
-    {
-      set = set_;
-      return *this;
-    }
-
-    operator VkDescriptorUpdateTemplateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>(this);
-    }
-
-    operator VkDescriptorUpdateTemplateCreateInfo &()
-    {
-      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>(this);
-    }
-
-    bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
-          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
-          && ( templateType == rhs.templateType )
-          && ( descriptorSetLayout == rhs.descriptorSetLayout )
-          && ( pipelineBindPoint == rhs.pipelineBindPoint )
-          && ( pipelineLayout == rhs.pipelineLayout )
-          && ( set == rhs.set );
-    }
-
-    bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
-
-  public:
-    void* pNext = nullptr;
-    DescriptorUpdateTemplateCreateFlags flags;
-    uint32_t descriptorUpdateEntryCount;
-    const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries;
-    DescriptorUpdateTemplateType templateType;
-    DescriptorSetLayout descriptorSetLayout;
-    PipelineBindPoint pipelineBindPoint;
-    PipelineLayout pipelineLayout;
-    uint32_t set;
-  };
-  static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
-
-  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
-
-  enum class ObjectType
-  {
-    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
-    eInstance = VK_OBJECT_TYPE_INSTANCE,
-    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
-    eDevice = VK_OBJECT_TYPE_DEVICE,
-    eQueue = VK_OBJECT_TYPE_QUEUE,
-    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
-    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
-    eFence = VK_OBJECT_TYPE_FENCE,
-    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
-    eBuffer = VK_OBJECT_TYPE_BUFFER,
-    eImage = VK_OBJECT_TYPE_IMAGE,
-    eEvent = VK_OBJECT_TYPE_EVENT,
-    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
-    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
-    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
-    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
-    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
-    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
-    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
-    ePipeline = VK_OBJECT_TYPE_PIPELINE,
-    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
-    eSampler = VK_OBJECT_TYPE_SAMPLER,
-    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
-    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
-    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
-    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
-    eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
-    eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
-    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
-    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
-    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
-    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
-    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
-    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
-    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
-    eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX,
-    eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX,
-    eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
-    eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
-    eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV
-  };
-
-  struct DebugUtilsObjectNameInfoEXT
-  {
-    DebugUtilsObjectNameInfoEXT( ObjectType objectType_ = ObjectType::eUnknown,
-                                 uint64_t objectHandle_ = 0,
-                                 const char* pObjectName_ = nullptr )
-      : objectType( objectType_ )
-      , objectHandle( objectHandle_ )
-      , pObjectName( pObjectName_ )
-    {
-    }
-
-    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) );
-    }
-
-    DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) );
-      return *this;
-    }
-    DebugUtilsObjectNameInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugUtilsObjectNameInfoEXT& setObjectType( ObjectType objectType_ )
-    {
-      objectType = objectType_;
-      return *this;
-    }
-
-    DebugUtilsObjectNameInfoEXT& setObjectHandle( uint64_t objectHandle_ )
-    {
-      objectHandle = objectHandle_;
-      return *this;
-    }
-
-    DebugUtilsObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
-    {
-      pObjectName = pObjectName_;
-      return *this;
-    }
-
-    operator VkDebugUtilsObjectNameInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>(this);
-    }
-
-    operator VkDebugUtilsObjectNameInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>(this);
-    }
-
-    bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( objectHandle == rhs.objectHandle )
-          && ( pObjectName == rhs.pObjectName );
-    }
-
-    bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    ObjectType objectType;
-    uint64_t objectHandle;
-    const char* pObjectName;
-  };
-  static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
-
-  struct DebugUtilsObjectTagInfoEXT
-  {
-    DebugUtilsObjectTagInfoEXT( ObjectType objectType_ = ObjectType::eUnknown,
-                                uint64_t objectHandle_ = 0,
-                                uint64_t tagName_ = 0,
-                                size_t tagSize_ = 0,
-                                const void* pTag_ = nullptr )
-      : objectType( objectType_ )
-      , objectHandle( objectHandle_ )
-      , tagName( tagName_ )
-      , tagSize( tagSize_ )
-      , pTag( pTag_ )
-    {
-    }
-
-    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) );
-    }
-
-    DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) );
-      return *this;
-    }
-    DebugUtilsObjectTagInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugUtilsObjectTagInfoEXT& setObjectType( ObjectType objectType_ )
-    {
-      objectType = objectType_;
-      return *this;
-    }
-
-    DebugUtilsObjectTagInfoEXT& setObjectHandle( uint64_t objectHandle_ )
-    {
-      objectHandle = objectHandle_;
-      return *this;
-    }
-
-    DebugUtilsObjectTagInfoEXT& setTagName( uint64_t tagName_ )
-    {
-      tagName = tagName_;
-      return *this;
-    }
-
-    DebugUtilsObjectTagInfoEXT& setTagSize( size_t tagSize_ )
-    {
-      tagSize = tagSize_;
-      return *this;
-    }
-
-    DebugUtilsObjectTagInfoEXT& setPTag( const void* pTag_ )
-    {
-      pTag = pTag_;
-      return *this;
-    }
-
-    operator VkDebugUtilsObjectTagInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>(this);
-    }
-
-    operator VkDebugUtilsObjectTagInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>(this);
-    }
-
-    bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( objectHandle == rhs.objectHandle )
-          && ( tagName == rhs.tagName )
-          && ( tagSize == rhs.tagSize )
-          && ( pTag == rhs.pTag );
-    }
-
-    bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    ObjectType objectType;
-    uint64_t objectHandle;
-    uint64_t tagName;
-    size_t tagSize;
-    const void* pTag;
-  };
-  static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
-
-  struct DebugUtilsMessengerCallbackDataEXT
-  {
-    DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT flags_ = DebugUtilsMessengerCallbackDataFlagsEXT(),
-                                        const char* pMessageIdName_ = nullptr,
-                                        int32_t messageIdNumber_ = 0,
-                                        const char* pMessage_ = nullptr,
-                                        uint32_t queueLabelCount_ = 0,
-                                        DebugUtilsLabelEXT* pQueueLabels_ = nullptr,
-                                        uint32_t cmdBufLabelCount_ = 0,
-                                        DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr,
-                                        uint32_t objectCount_ = 0,
-                                        DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr )
-      : flags( flags_ )
-      , pMessageIdName( pMessageIdName_ )
-      , messageIdNumber( messageIdNumber_ )
-      , pMessage( pMessage_ )
-      , queueLabelCount( queueLabelCount_ )
-      , pQueueLabels( pQueueLabels_ )
-      , cmdBufLabelCount( cmdBufLabelCount_ )
-      , pCmdBufLabels( pCmdBufLabels_ )
-      , objectCount( objectCount_ )
-      , pObjects( pObjects_ )
-    {
-    }
-
-    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) );
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) );
-      return *this;
-    }
-    DebugUtilsMessengerCallbackDataEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setFlags( DebugUtilsMessengerCallbackDataFlagsEXT flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setPMessageIdName( const char* pMessageIdName_ )
-    {
-      pMessageIdName = pMessageIdName_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setMessageIdNumber( int32_t messageIdNumber_ )
-    {
-      messageIdNumber = messageIdNumber_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setPMessage( const char* pMessage_ )
-    {
-      pMessage = pMessage_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setQueueLabelCount( uint32_t queueLabelCount_ )
-    {
-      queueLabelCount = queueLabelCount_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setPQueueLabels( DebugUtilsLabelEXT* pQueueLabels_ )
-    {
-      pQueueLabels = pQueueLabels_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setCmdBufLabelCount( uint32_t cmdBufLabelCount_ )
-    {
-      cmdBufLabelCount = cmdBufLabelCount_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setPCmdBufLabels( DebugUtilsLabelEXT* pCmdBufLabels_ )
-    {
-      pCmdBufLabels = pCmdBufLabels_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setObjectCount( uint32_t objectCount_ )
-    {
-      objectCount = objectCount_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCallbackDataEXT& setPObjects( DebugUtilsObjectNameInfoEXT* pObjects_ )
-    {
-      pObjects = pObjects_;
-      return *this;
-    }
-
-    operator VkDebugUtilsMessengerCallbackDataEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>(this);
-    }
-
-    operator VkDebugUtilsMessengerCallbackDataEXT &()
-    {
-      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>(this);
-    }
-
-    bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pMessageIdName == rhs.pMessageIdName )
-          && ( messageIdNumber == rhs.messageIdNumber )
-          && ( pMessage == rhs.pMessage )
-          && ( queueLabelCount == rhs.queueLabelCount )
-          && ( pQueueLabels == rhs.pQueueLabels )
-          && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
-          && ( pCmdBufLabels == rhs.pCmdBufLabels )
-          && ( objectCount == rhs.objectCount )
-          && ( pObjects == rhs.pObjects );
-    }
-
-    bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
-
-  public:
-    const void* pNext = nullptr;
-    DebugUtilsMessengerCallbackDataFlagsEXT flags;
-    const char* pMessageIdName;
-    int32_t messageIdNumber;
-    const char* pMessage;
-    uint32_t queueLabelCount;
-    DebugUtilsLabelEXT* pQueueLabels;
-    uint32_t cmdBufLabelCount;
-    DebugUtilsLabelEXT* pCmdBufLabels;
-    uint32_t objectCount;
-    DebugUtilsObjectNameInfoEXT* pObjects;
-  };
-  static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
-
-  enum class QueueFlagBits
-  {
-    eGraphics = VK_QUEUE_GRAPHICS_BIT,
-    eCompute = VK_QUEUE_COMPUTE_BIT,
-    eTransfer = VK_QUEUE_TRANSFER_BIT,
-    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
-    eProtected = VK_QUEUE_PROTECTED_BIT
-  };
-
-  using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
-
-  VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
-  {
-    return QueueFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
-  {
-    return ~( QueueFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<QueueFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected)
-    };
-  };
-
-  struct QueueFamilyProperties
-  {
-    operator VkQueueFamilyProperties const&() const
-    {
-      return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
-    }
-
-    operator VkQueueFamilyProperties &()
-    {
-      return *reinterpret_cast<VkQueueFamilyProperties*>(this);
-    }
-
-    bool operator==( QueueFamilyProperties const& rhs ) const
-    {
-      return ( queueFlags == rhs.queueFlags )
-          && ( queueCount == rhs.queueCount )
-          && ( timestampValidBits == rhs.timestampValidBits )
-          && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
-    }
-
-    bool operator!=( QueueFamilyProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    QueueFlags queueFlags;
-    uint32_t queueCount;
-    uint32_t timestampValidBits;
-    Extent3D minImageTransferGranularity;
-  };
-  static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
-
-  struct QueueFamilyProperties2
-  {
-    operator VkQueueFamilyProperties2 const&() const
-    {
-      return *reinterpret_cast<const VkQueueFamilyProperties2*>(this);
-    }
-
-    operator VkQueueFamilyProperties2 &()
-    {
-      return *reinterpret_cast<VkQueueFamilyProperties2*>(this);
-    }
-
-    bool operator==( QueueFamilyProperties2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( queueFamilyProperties == rhs.queueFamilyProperties );
-    }
-
-    bool operator!=( QueueFamilyProperties2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eQueueFamilyProperties2;
-
-  public:
-    void* pNext = nullptr;
-    QueueFamilyProperties queueFamilyProperties;
-  };
-  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
-
-  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
-
-  enum class DeviceQueueCreateFlagBits
-  {
-    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
-  };
-
-  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
-
-  VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
-  {
-    return DeviceQueueCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits )
-  {
-    return ~( DeviceQueueCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<DeviceQueueCreateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
-    };
-  };
-
-  struct DeviceQueueCreateInfo
-  {
-    DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(),
-                           uint32_t queueFamilyIndex_ = 0,
-                           uint32_t queueCount_ = 0,
-                           const float* pQueuePriorities_ = nullptr )
-      : flags( flags_ )
-      , queueFamilyIndex( queueFamilyIndex_ )
-      , queueCount( queueCount_ )
-      , pQueuePriorities( pQueuePriorities_ )
-    {
-    }
-
-    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) );
-    }
-
-    DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) );
-      return *this;
-    }
-    DeviceQueueCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
-    {
-      queueFamilyIndex = queueFamilyIndex_;
-      return *this;
-    }
-
-    DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ )
-    {
-      queueCount = queueCount_;
-      return *this;
-    }
-
-    DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ )
-    {
-      pQueuePriorities = pQueuePriorities_;
-      return *this;
-    }
-
-    operator VkDeviceQueueCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>(this);
-    }
-
-    operator VkDeviceQueueCreateInfo &()
-    {
-      return *reinterpret_cast<VkDeviceQueueCreateInfo*>(this);
-    }
-
-    bool operator==( DeviceQueueCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queueFamilyIndex == rhs.queueFamilyIndex )
-          && ( queueCount == rhs.queueCount )
-          && ( pQueuePriorities == rhs.pQueuePriorities );
-    }
-
-    bool operator!=( DeviceQueueCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceQueueCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceQueueCreateFlags flags;
-    uint32_t queueFamilyIndex;
-    uint32_t queueCount;
-    const float* pQueuePriorities;
-  };
-  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
-
-  struct DeviceCreateInfo
-  {
-    DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(),
-                      uint32_t queueCreateInfoCount_ = 0,
-                      const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr,
-                      uint32_t enabledLayerCount_ = 0,
-                      const char* const* ppEnabledLayerNames_ = nullptr,
-                      uint32_t enabledExtensionCount_ = 0,
-                      const char* const* ppEnabledExtensionNames_ = nullptr,
-                      const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr )
-      : flags( flags_ )
-      , queueCreateInfoCount( queueCreateInfoCount_ )
-      , pQueueCreateInfos( pQueueCreateInfos_ )
-      , enabledLayerCount( enabledLayerCount_ )
-      , ppEnabledLayerNames( ppEnabledLayerNames_ )
-      , enabledExtensionCount( enabledExtensionCount_ )
-      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
-      , pEnabledFeatures( pEnabledFeatures_ )
-    {
-    }
-
-    DeviceCreateInfo( VkDeviceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceCreateInfo ) );
-    }
-
-    DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceCreateInfo ) );
-      return *this;
-    }
-    DeviceCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ )
-    {
-      queueCreateInfoCount = queueCreateInfoCount_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ )
-    {
-      pQueueCreateInfos = pQueueCreateInfos_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
-    {
-      enabledLayerCount = enabledLayerCount_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
-    {
-      ppEnabledLayerNames = ppEnabledLayerNames_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
-    {
-      enabledExtensionCount = enabledExtensionCount_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
-    {
-      ppEnabledExtensionNames = ppEnabledExtensionNames_;
-      return *this;
-    }
-
-    DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ )
-    {
-      pEnabledFeatures = pEnabledFeatures_;
-      return *this;
-    }
-
-    operator VkDeviceCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceCreateInfo*>(this);
-    }
-
-    operator VkDeviceCreateInfo &()
-    {
-      return *reinterpret_cast<VkDeviceCreateInfo*>(this);
-    }
-
-    bool operator==( DeviceCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
-          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
-          && ( enabledLayerCount == rhs.enabledLayerCount )
-          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
-          && ( enabledExtensionCount == rhs.enabledExtensionCount )
-          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
-          && ( pEnabledFeatures == rhs.pEnabledFeatures );
-    }
-
-    bool operator!=( DeviceCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceCreateFlags flags;
-    uint32_t queueCreateInfoCount;
-    const DeviceQueueCreateInfo* pQueueCreateInfos;
-    uint32_t enabledLayerCount;
-    const char* const* ppEnabledLayerNames;
-    uint32_t enabledExtensionCount;
-    const char* const* ppEnabledExtensionNames;
-    const PhysicalDeviceFeatures* pEnabledFeatures;
-  };
-  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
-
-  struct DeviceQueueInfo2
-  {
-    DeviceQueueInfo2( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(),
-                      uint32_t queueFamilyIndex_ = 0,
-                      uint32_t queueIndex_ = 0 )
-      : flags( flags_ )
-      , queueFamilyIndex( queueFamilyIndex_ )
-      , queueIndex( queueIndex_ )
-    {
-    }
-
-    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) );
-    }
-
-    DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) );
-      return *this;
-    }
-    DeviceQueueInfo2& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceQueueInfo2& setFlags( DeviceQueueCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DeviceQueueInfo2& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
-    {
-      queueFamilyIndex = queueFamilyIndex_;
-      return *this;
-    }
-
-    DeviceQueueInfo2& setQueueIndex( uint32_t queueIndex_ )
-    {
-      queueIndex = queueIndex_;
-      return *this;
-    }
-
-    operator VkDeviceQueueInfo2 const&() const
-    {
-      return *reinterpret_cast<const VkDeviceQueueInfo2*>(this);
-    }
-
-    operator VkDeviceQueueInfo2 &()
-    {
-      return *reinterpret_cast<VkDeviceQueueInfo2*>(this);
-    }
-
-    bool operator==( DeviceQueueInfo2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queueFamilyIndex == rhs.queueFamilyIndex )
-          && ( queueIndex == rhs.queueIndex );
-    }
-
-    bool operator!=( DeviceQueueInfo2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceQueueInfo2;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceQueueCreateFlags flags;
-    uint32_t queueFamilyIndex;
-    uint32_t queueIndex;
-  };
-  static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
-
-  enum class MemoryPropertyFlagBits
-  {
-    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
-    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
-    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
-    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
-    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
-    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT
-  };
-
-  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
-
-  VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
-  {
-    return MemoryPropertyFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
-  {
-    return ~( MemoryPropertyFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<MemoryPropertyFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected)
-    };
-  };
-
-  struct MemoryType
-  {
-    operator VkMemoryType const&() const
-    {
-      return *reinterpret_cast<const VkMemoryType*>(this);
-    }
-
-    operator VkMemoryType &()
-    {
-      return *reinterpret_cast<VkMemoryType*>(this);
-    }
-
-    bool operator==( MemoryType const& rhs ) const
-    {
-      return ( propertyFlags == rhs.propertyFlags )
-          && ( heapIndex == rhs.heapIndex );
-    }
-
-    bool operator!=( MemoryType const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    MemoryPropertyFlags propertyFlags;
-    uint32_t heapIndex;
-  };
-  static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
-
-  enum class MemoryHeapFlagBits
-  {
-    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
-    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
-    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
-  };
-
-  using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
-
-  VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
-  {
-    return MemoryHeapFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
-  {
-    return ~( MemoryHeapFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<MemoryHeapFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance)
-    };
-  };
-
-  struct MemoryHeap
-  {
-    operator VkMemoryHeap const&() const
-    {
-      return *reinterpret_cast<const VkMemoryHeap*>(this);
-    }
-
-    operator VkMemoryHeap &()
-    {
-      return *reinterpret_cast<VkMemoryHeap*>(this);
-    }
-
-    bool operator==( MemoryHeap const& rhs ) const
-    {
-      return ( size == rhs.size )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( MemoryHeap const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DeviceSize size;
-    MemoryHeapFlags flags;
-  };
-  static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceMemoryProperties
-  {
-    operator VkPhysicalDeviceMemoryProperties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>(this);
-    }
-
-    operator VkPhysicalDeviceMemoryProperties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const
-    {
-      return ( memoryTypeCount == rhs.memoryTypeCount )
-          && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 )
-          && ( memoryHeapCount == rhs.memoryHeapCount )
-          && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 );
-    }
-
-    bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t memoryTypeCount;
-    MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
-    uint32_t memoryHeapCount;
-    MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
-  };
-  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceMemoryProperties2
-  {
-    operator VkPhysicalDeviceMemoryProperties2 const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>(this);
-    }
-
-    operator VkPhysicalDeviceMemoryProperties2 &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>(this);
-    }
-
-    bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryProperties == rhs.memoryProperties );
-    }
-
-    bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
-
-  public:
-    void* pNext = nullptr;
-    PhysicalDeviceMemoryProperties memoryProperties;
-  };
-  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
-
-  enum class AccessFlagBits
-  {
-    eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
-    eIndexRead = VK_ACCESS_INDEX_READ_BIT,
-    eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
-    eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
-    eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
-    eShaderRead = VK_ACCESS_SHADER_READ_BIT,
-    eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
-    eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
-    eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
-    eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
-    eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
-    eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
-    eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
-    eHostRead = VK_ACCESS_HOST_READ_BIT,
-    eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
-    eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
-    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
-    eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
-    eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
-    eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
-    eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
-    eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
-    eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX,
-    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
-    eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
-    eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
-    eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV
-  };
-
-  using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
-
-  VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
-  {
-    return AccessFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
-  {
-    return ~( AccessFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<AccessFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV)
-    };
-  };
-
-  struct MemoryBarrier
-  {
-    MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(),
-                   AccessFlags dstAccessMask_ = AccessFlags() )
-      : srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-    {
-    }
-
-    MemoryBarrier( VkMemoryBarrier const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryBarrier ) );
-    }
-
-    MemoryBarrier& operator=( VkMemoryBarrier const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryBarrier ) );
-      return *this;
-    }
-    MemoryBarrier& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    operator VkMemoryBarrier const&() const
-    {
-      return *reinterpret_cast<const VkMemoryBarrier*>(this);
-    }
-
-    operator VkMemoryBarrier &()
-    {
-      return *reinterpret_cast<VkMemoryBarrier*>(this);
-    }
-
-    bool operator==( MemoryBarrier const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask );
-    }
-
-    bool operator!=( MemoryBarrier const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryBarrier;
-
-  public:
-    const void* pNext = nullptr;
-    AccessFlags srcAccessMask;
-    AccessFlags dstAccessMask;
-  };
-  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
-
-  struct BufferMemoryBarrier
-  {
-    BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(),
-                         AccessFlags dstAccessMask_ = AccessFlags(),
-                         uint32_t srcQueueFamilyIndex_ = 0,
-                         uint32_t dstQueueFamilyIndex_ = 0,
-                         Buffer buffer_ = Buffer(),
-                         DeviceSize offset_ = 0,
-                         DeviceSize size_ = 0 )
-      : srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
-      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
-      , buffer( buffer_ )
-      , offset( offset_ )
-      , size( size_ )
-    {
-    }
-
-    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) );
-    }
-
-    BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) );
-      return *this;
-    }
-    BufferMemoryBarrier& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
-    {
-      srcQueueFamilyIndex = srcQueueFamilyIndex_;
-      return *this;
-    }
-
-    BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
-    {
-      dstQueueFamilyIndex = dstQueueFamilyIndex_;
-      return *this;
-    }
-
-    BufferMemoryBarrier& setBuffer( Buffer buffer_ )
+    BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
     {
       buffer = buffer_;
       return *this;
     }
 
-    BufferMemoryBarrier& setOffset( DeviceSize offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    BufferMemoryBarrier& setSize( DeviceSize size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    operator VkBufferMemoryBarrier const&() const
-    {
-      return *reinterpret_cast<const VkBufferMemoryBarrier*>(this);
-    }
-
-    operator VkBufferMemoryBarrier &()
-    {
-      return *reinterpret_cast<VkBufferMemoryBarrier*>(this);
-    }
-
-    bool operator==( BufferMemoryBarrier const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
-          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
-          && ( buffer == rhs.buffer )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
-    }
-
-    bool operator!=( BufferMemoryBarrier const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBufferMemoryBarrier;
-
-  public:
-    const void* pNext = nullptr;
-    AccessFlags srcAccessMask;
-    AccessFlags dstAccessMask;
-    uint32_t srcQueueFamilyIndex;
-    uint32_t dstQueueFamilyIndex;
-    Buffer buffer;
-    DeviceSize offset;
-    DeviceSize size;
-  };
-  static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
-
-  enum class BufferUsageFlagBits
-  {
-    eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
-    eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
-    eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
-    eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
-    eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
-    eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
-    eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
-    eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
-    eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
-    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
-    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
-    eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
-    eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV
-  };
-
-  using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
-
-  VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
-  {
-    return BufferUsageFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
-  {
-    return ~( BufferUsageFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<BufferUsageFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV)
-    };
-  };
-
-  enum class BufferCreateFlagBits
-  {
-    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
-    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
-    eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
-    eProtected = VK_BUFFER_CREATE_PROTECTED_BIT
-  };
-
-  using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
-
-  VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
-  {
-    return BufferCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
-  {
-    return ~( BufferCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<BufferCreateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected)
-    };
-  };
-
-  struct BufferCreateInfo
-  {
-    BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(),
-                      DeviceSize size_ = 0,
-                      BufferUsageFlags usage_ = BufferUsageFlags(),
-                      SharingMode sharingMode_ = SharingMode::eExclusive,
-                      uint32_t queueFamilyIndexCount_ = 0,
-                      const uint32_t* pQueueFamilyIndices_ = nullptr )
-      : flags( flags_ )
-      , size( size_ )
-      , usage( usage_ )
-      , sharingMode( sharingMode_ )
-      , queueFamilyIndexCount( queueFamilyIndexCount_ )
-      , pQueueFamilyIndices( pQueueFamilyIndices_ )
-    {
-    }
-
-    BufferCreateInfo( VkBufferCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferCreateInfo ) );
-    }
-
-    BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferCreateInfo ) );
-      return *this;
-    }
-    BufferCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BufferCreateInfo& setFlags( BufferCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    BufferCreateInfo& setSize( DeviceSize size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    BufferCreateInfo& setUsage( BufferUsageFlags usage_ )
-    {
-      usage = usage_;
-      return *this;
-    }
-
-    BufferCreateInfo& setSharingMode( SharingMode sharingMode_ )
-    {
-      sharingMode = sharingMode_;
-      return *this;
-    }
-
-    BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
-    {
-      queueFamilyIndexCount = queueFamilyIndexCount_;
-      return *this;
-    }
-
-    BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
-    {
-      pQueueFamilyIndices = pQueueFamilyIndices_;
-      return *this;
-    }
-
-    operator VkBufferCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkBufferCreateInfo*>(this);
-    }
-
-    operator VkBufferCreateInfo &()
-    {
-      return *reinterpret_cast<VkBufferCreateInfo*>(this);
-    }
-
-    bool operator==( BufferCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( size == rhs.size )
-          && ( usage == rhs.usage )
-          && ( sharingMode == rhs.sharingMode )
-          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
-          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
-    }
-
-    bool operator!=( BufferCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBufferCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    BufferCreateFlags flags;
-    DeviceSize size;
-    BufferUsageFlags usage;
-    SharingMode sharingMode;
-    uint32_t queueFamilyIndexCount;
-    const uint32_t* pQueueFamilyIndices;
-  };
-  static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
-
-  enum class ShaderStageFlagBits
-  {
-    eVertex = VK_SHADER_STAGE_VERTEX_BIT,
-    eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
-    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
-    eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
-    eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
-    eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
-    eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
-    eAll = VK_SHADER_STAGE_ALL,
-    eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
-    eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
-    eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
-    eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
-    eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
-    eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
-    eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
-    eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV
-  };
-
-  using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
-
-  VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
-  {
-    return ShaderStageFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
-  {
-    return ~( ShaderStageFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ShaderStageFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV)
-    };
-  };
-
-  struct DescriptorSetLayoutBinding
-  {
-    DescriptorSetLayoutBinding( uint32_t binding_ = 0,
-                                DescriptorType descriptorType_ = DescriptorType::eSampler,
-                                uint32_t descriptorCount_ = 0,
-                                ShaderStageFlags stageFlags_ = ShaderStageFlags(),
-                                const Sampler* pImmutableSamplers_ = nullptr )
-      : binding( binding_ )
-      , descriptorType( descriptorType_ )
-      , descriptorCount( descriptorCount_ )
-      , stageFlags( stageFlags_ )
-      , pImmutableSamplers( pImmutableSamplers_ )
-    {
-    }
-
-    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) );
-    }
-
-    DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) );
-      return *this;
-    }
-    DescriptorSetLayoutBinding& setBinding( uint32_t binding_ )
-    {
-      binding = binding_;
-      return *this;
-    }
-
-    DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ )
-    {
-      descriptorType = descriptorType_;
-      return *this;
-    }
-
-    DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ )
-    {
-      descriptorCount = descriptorCount_;
-      return *this;
-    }
-
-    DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ )
-    {
-      stageFlags = stageFlags_;
-      return *this;
-    }
-
-    DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ )
-    {
-      pImmutableSamplers = pImmutableSamplers_;
-      return *this;
-    }
-
-    operator VkDescriptorSetLayoutBinding const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>(this);
-    }
-
-    operator VkDescriptorSetLayoutBinding &()
-    {
-      return *reinterpret_cast<VkDescriptorSetLayoutBinding*>(this);
-    }
-
-    bool operator==( DescriptorSetLayoutBinding const& rhs ) const
-    {
-      return ( binding == rhs.binding )
-          && ( descriptorType == rhs.descriptorType )
-          && ( descriptorCount == rhs.descriptorCount )
-          && ( stageFlags == rhs.stageFlags )
-          && ( pImmutableSamplers == rhs.pImmutableSamplers );
-    }
-
-    bool operator!=( DescriptorSetLayoutBinding const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t binding;
-    DescriptorType descriptorType;
-    uint32_t descriptorCount;
-    ShaderStageFlags stageFlags;
-    const Sampler* pImmutableSamplers;
-  };
-  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
-
-  struct PipelineShaderStageCreateInfo
-  {
-    PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(),
-                                   ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex,
-                                   ShaderModule module_ = ShaderModule(),
-                                   const char* pName_ = nullptr,
-                                   const SpecializationInfo* pSpecializationInfo_ = nullptr )
-      : flags( flags_ )
-      , stage( stage_ )
-      , module( module_ )
-      , pName( pName_ )
-      , pSpecializationInfo( pSpecializationInfo_ )
-    {
-    }
-
-    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) );
-    }
-
-    PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) );
-      return *this;
-    }
-    PipelineShaderStageCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ )
-    {
-      stage = stage_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo& setModule( ShaderModule module_ )
-    {
-      module = module_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo& setPName( const char* pName_ )
-    {
-      pName = pName_;
-      return *this;
-    }
-
-    PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ )
-    {
-      pSpecializationInfo = pSpecializationInfo_;
-      return *this;
-    }
-
-    operator VkPipelineShaderStageCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>(this);
-    }
-
-    operator VkPipelineShaderStageCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineShaderStageCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( stage == rhs.stage )
-          && ( module == rhs.module )
-          && ( pName == rhs.pName )
-          && ( pSpecializationInfo == rhs.pSpecializationInfo );
-    }
-
-    bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineShaderStageCreateFlags flags;
-    ShaderStageFlagBits stage;
-    ShaderModule module;
-    const char* pName;
-    const SpecializationInfo* pSpecializationInfo;
-  };
-  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
-
-  struct PushConstantRange
-  {
-    PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(),
-                       uint32_t offset_ = 0,
-                       uint32_t size_ = 0 )
-      : stageFlags( stageFlags_ )
-      , offset( offset_ )
-      , size( size_ )
-    {
-    }
-
-    PushConstantRange( VkPushConstantRange const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PushConstantRange ) );
-    }
-
-    PushConstantRange& operator=( VkPushConstantRange const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PushConstantRange ) );
-      return *this;
-    }
-    PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ )
-    {
-      stageFlags = stageFlags_;
-      return *this;
-    }
-
-    PushConstantRange& setOffset( uint32_t offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    PushConstantRange& setSize( uint32_t size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    operator VkPushConstantRange const&() const
-    {
-      return *reinterpret_cast<const VkPushConstantRange*>(this);
-    }
-
-    operator VkPushConstantRange &()
-    {
-      return *reinterpret_cast<VkPushConstantRange*>(this);
-    }
-
-    bool operator==( PushConstantRange const& rhs ) const
-    {
-      return ( stageFlags == rhs.stageFlags )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
-    }
-
-    bool operator!=( PushConstantRange const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ShaderStageFlags stageFlags;
-    uint32_t offset;
-    uint32_t size;
-  };
-  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
-
-  struct PipelineLayoutCreateInfo
-  {
-    PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(),
-                              uint32_t setLayoutCount_ = 0,
-                              const DescriptorSetLayout* pSetLayouts_ = nullptr,
-                              uint32_t pushConstantRangeCount_ = 0,
-                              const PushConstantRange* pPushConstantRanges_ = nullptr )
-      : flags( flags_ )
-      , setLayoutCount( setLayoutCount_ )
-      , pSetLayouts( pSetLayouts_ )
-      , pushConstantRangeCount( pushConstantRangeCount_ )
-      , pPushConstantRanges( pPushConstantRanges_ )
-    {
-    }
-
-    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) );
-    }
-
-    PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) );
-      return *this;
-    }
-    PipelineLayoutCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ )
-    {
-      setLayoutCount = setLayoutCount_;
-      return *this;
-    }
-
-    PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
-    {
-      pSetLayouts = pSetLayouts_;
-      return *this;
-    }
-
-    PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ )
-    {
-      pushConstantRangeCount = pushConstantRangeCount_;
-      return *this;
-    }
-
-    PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ )
-    {
-      pPushConstantRanges = pPushConstantRanges_;
-      return *this;
-    }
-
-    operator VkPipelineLayoutCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>(this);
-    }
-
-    operator VkPipelineLayoutCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineLayoutCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineLayoutCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( setLayoutCount == rhs.setLayoutCount )
-          && ( pSetLayouts == rhs.pSetLayouts )
-          && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
-          && ( pPushConstantRanges == rhs.pPushConstantRanges );
-    }
-
-    bool operator!=( PipelineLayoutCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineLayoutCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineLayoutCreateFlags flags;
-    uint32_t setLayoutCount;
-    const DescriptorSetLayout* pSetLayouts;
-    uint32_t pushConstantRangeCount;
-    const PushConstantRange* pPushConstantRanges;
-  };
-  static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
-
-  struct ShaderStatisticsInfoAMD
-  {
-    operator VkShaderStatisticsInfoAMD const&() const
-    {
-      return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>(this);
-    }
-
-    operator VkShaderStatisticsInfoAMD &()
-    {
-      return *reinterpret_cast<VkShaderStatisticsInfoAMD*>(this);
-    }
-
-    bool operator==( ShaderStatisticsInfoAMD const& rhs ) const
-    {
-      return ( shaderStageMask == rhs.shaderStageMask )
-          && ( resourceUsage == rhs.resourceUsage )
-          && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
-          && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
-          && ( numAvailableVgprs == rhs.numAvailableVgprs )
-          && ( numAvailableSgprs == rhs.numAvailableSgprs )
-          && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 );
-    }
-
-    bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ShaderStageFlags shaderStageMask;
-    ShaderResourceUsageAMD resourceUsage;
-    uint32_t numPhysicalVgprs;
-    uint32_t numPhysicalSgprs;
-    uint32_t numAvailableVgprs;
-    uint32_t numAvailableSgprs;
-    uint32_t computeWorkGroupSize[3];
-  };
-  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
-
-  enum class ImageUsageFlagBits
-  {
-    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
-    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
-    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
-    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
-    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
-    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
-    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
-    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
-    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV
-  };
-
-  using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
-
-  VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
-  {
-    return ImageUsageFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
-  {
-    return ~( ImageUsageFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ImageUsageFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV)
-    };
-  };
-
-  struct SharedPresentSurfaceCapabilitiesKHR
-  {
-    operator VkSharedPresentSurfaceCapabilitiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>(this);
-    }
-
-    operator VkSharedPresentSurfaceCapabilitiesKHR &()
-    {
-      return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>(this);
-    }
-
-    bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
-    }
-
-    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
-
-  public:
-    void* pNext = nullptr;
-    ImageUsageFlags sharedPresentSupportedUsageFlags;
-  };
-  static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
-
-  struct ImageViewUsageCreateInfo
-  {
-    ImageViewUsageCreateInfo( ImageUsageFlags usage_ = ImageUsageFlags() )
-      : usage( usage_ )
-    {
-    }
-
-    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) );
-    }
-
-    ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) );
-      return *this;
-    }
-    ImageViewUsageCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageViewUsageCreateInfo& setUsage( ImageUsageFlags usage_ )
-    {
-      usage = usage_;
-      return *this;
-    }
-
-    operator VkImageViewUsageCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkImageViewUsageCreateInfo*>(this);
-    }
-
-    operator VkImageViewUsageCreateInfo &()
-    {
-      return *reinterpret_cast<VkImageViewUsageCreateInfo*>(this);
-    }
-
-    bool operator==( ImageViewUsageCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( usage == rhs.usage );
-    }
-
-    bool operator!=( ImageViewUsageCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageViewUsageCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ImageUsageFlags usage;
-  };
-  static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
-
-  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
-
-  enum class ImageCreateFlagBits
-  {
-    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
-    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
-    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
-    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
-    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
-    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
-    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT,
-    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
-    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
-    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
-    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
-    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
-    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
-    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
-    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
-    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
-    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
-    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT,
-    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
-    eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT
-  };
-
-  using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
-
-  VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
-  {
-    return ImageCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
-  {
-    return ~( ImageCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ImageCreateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT)
-    };
-  };
-
-  struct PhysicalDeviceImageFormatInfo2
-  {
-    PhysicalDeviceImageFormatInfo2( Format format_ = Format::eUndefined,
-                                    ImageType type_ = ImageType::e1D,
-                                    ImageTiling tiling_ = ImageTiling::eOptimal,
-                                    ImageUsageFlags usage_ = ImageUsageFlags(),
-                                    ImageCreateFlags flags_ = ImageCreateFlags() )
-      : format( format_ )
-      , type( type_ )
-      , tiling( tiling_ )
-      , usage( usage_ )
-      , flags( flags_ )
-    {
-    }
-
-    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) );
-    }
-
-    PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) );
-      return *this;
-    }
-    PhysicalDeviceImageFormatInfo2& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceImageFormatInfo2& setFormat( Format format_ )
-    {
-      format = format_;
-      return *this;
-    }
-
-    PhysicalDeviceImageFormatInfo2& setType( ImageType type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    PhysicalDeviceImageFormatInfo2& setTiling( ImageTiling tiling_ )
-    {
-      tiling = tiling_;
-      return *this;
-    }
-
-    PhysicalDeviceImageFormatInfo2& setUsage( ImageUsageFlags usage_ )
-    {
-      usage = usage_;
-      return *this;
-    }
-
-    PhysicalDeviceImageFormatInfo2& setFlags( ImageCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceImageFormatInfo2 const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>(this);
-    }
-
-    operator VkPhysicalDeviceImageFormatInfo2 &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>(this);
-    }
-
-    bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( format == rhs.format )
-          && ( type == rhs.type )
-          && ( tiling == rhs.tiling )
-          && ( usage == rhs.usage )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
-
-  public:
-    const void* pNext = nullptr;
-    Format format;
-    ImageType type;
-    ImageTiling tiling;
-    ImageUsageFlags usage;
-    ImageCreateFlags flags;
-  };
-  static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
-
-  enum class PipelineCreateFlagBits
-  {
-    eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
-    eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
-    eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
-    eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
-    eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
-    eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE,
-    eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE,
-    eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV
-  };
-
-  using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
-
-  VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
-  {
-    return PipelineCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
-  {
-    return ~( PipelineCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<PipelineCreateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV)
-    };
-  };
-
-  struct ComputePipelineCreateInfo
-  {
-    ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(),
-                               PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(),
-                               PipelineLayout layout_ = PipelineLayout(),
-                               Pipeline basePipelineHandle_ = Pipeline(),
-                               int32_t basePipelineIndex_ = 0 )
-      : flags( flags_ )
-      , stage( stage_ )
-      , layout( layout_ )
-      , basePipelineHandle( basePipelineHandle_ )
-      , basePipelineIndex( basePipelineIndex_ )
-    {
-    }
-
-    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) );
-    }
-
-    ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) );
-      return *this;
-    }
-    ComputePipelineCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ )
-    {
-      stage = stage_;
-      return *this;
-    }
-
-    ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ )
-    {
-      layout = layout_;
-      return *this;
-    }
-
-    ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
-    {
-      basePipelineHandle = basePipelineHandle_;
-      return *this;
-    }
-
-    ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
-    {
-      basePipelineIndex = basePipelineIndex_;
-      return *this;
-    }
-
-    operator VkComputePipelineCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkComputePipelineCreateInfo*>(this);
-    }
-
-    operator VkComputePipelineCreateInfo &()
-    {
-      return *reinterpret_cast<VkComputePipelineCreateInfo*>(this);
-    }
-
-    bool operator==( ComputePipelineCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( stage == rhs.stage )
-          && ( layout == rhs.layout )
-          && ( basePipelineHandle == rhs.basePipelineHandle )
-          && ( basePipelineIndex == rhs.basePipelineIndex );
-    }
-
-    bool operator!=( ComputePipelineCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eComputePipelineCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineCreateFlags flags;
-    PipelineShaderStageCreateInfo stage;
-    PipelineLayout layout;
-    Pipeline basePipelineHandle;
-    int32_t basePipelineIndex;
-  };
-  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
-
-  enum class ColorComponentFlagBits
-  {
-    eR = VK_COLOR_COMPONENT_R_BIT,
-    eG = VK_COLOR_COMPONENT_G_BIT,
-    eB = VK_COLOR_COMPONENT_B_BIT,
-    eA = VK_COLOR_COMPONENT_A_BIT
-  };
-
-  using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
-
-  VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
-  {
-    return ColorComponentFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
-  {
-    return ~( ColorComponentFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ColorComponentFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
-    };
-  };
-
-  struct PipelineColorBlendAttachmentState
-  {
-    PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0,
-                                       BlendFactor srcColorBlendFactor_ = BlendFactor::eZero,
-                                       BlendFactor dstColorBlendFactor_ = BlendFactor::eZero,
-                                       BlendOp colorBlendOp_ = BlendOp::eAdd,
-                                       BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero,
-                                       BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero,
-                                       BlendOp alphaBlendOp_ = BlendOp::eAdd,
-                                       ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
-      : blendEnable( blendEnable_ )
-      , srcColorBlendFactor( srcColorBlendFactor_ )
-      , dstColorBlendFactor( dstColorBlendFactor_ )
-      , colorBlendOp( colorBlendOp_ )
-      , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
-      , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
-      , alphaBlendOp( alphaBlendOp_ )
-      , colorWriteMask( colorWriteMask_ )
-    {
-    }
-
-    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) );
-    }
-
-    PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) );
-      return *this;
-    }
-    PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ )
-    {
-      blendEnable = blendEnable_;
-      return *this;
-    }
-
-    PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ )
-    {
-      srcColorBlendFactor = srcColorBlendFactor_;
-      return *this;
-    }
-
-    PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ )
-    {
-      dstColorBlendFactor = dstColorBlendFactor_;
-      return *this;
-    }
-
-    PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ )
-    {
-      colorBlendOp = colorBlendOp_;
-      return *this;
-    }
-
-    PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ )
-    {
-      srcAlphaBlendFactor = srcAlphaBlendFactor_;
-      return *this;
-    }
-
-    PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ )
-    {
-      dstAlphaBlendFactor = dstAlphaBlendFactor_;
-      return *this;
-    }
-
-    PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ )
-    {
-      alphaBlendOp = alphaBlendOp_;
-      return *this;
-    }
-
-    PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ )
-    {
-      colorWriteMask = colorWriteMask_;
-      return *this;
-    }
-
-    operator VkPipelineColorBlendAttachmentState const&() const
-    {
-      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>(this);
-    }
-
-    operator VkPipelineColorBlendAttachmentState &()
-    {
-      return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>(this);
-    }
-
-    bool operator==( PipelineColorBlendAttachmentState const& rhs ) const
-    {
-      return ( blendEnable == rhs.blendEnable )
-          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
-          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
-          && ( colorBlendOp == rhs.colorBlendOp )
-          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
-          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
-          && ( alphaBlendOp == rhs.alphaBlendOp )
-          && ( colorWriteMask == rhs.colorWriteMask );
-    }
-
-    bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Bool32 blendEnable;
-    BlendFactor srcColorBlendFactor;
-    BlendFactor dstColorBlendFactor;
-    BlendOp colorBlendOp;
-    BlendFactor srcAlphaBlendFactor;
-    BlendFactor dstAlphaBlendFactor;
-    BlendOp alphaBlendOp;
-    ColorComponentFlags colorWriteMask;
-  };
-  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
-
-  struct PipelineColorBlendStateCreateInfo
-  {
-    PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(),
-                                       Bool32 logicOpEnable_ = 0,
-                                       LogicOp logicOp_ = LogicOp::eClear,
-                                       uint32_t attachmentCount_ = 0,
-                                       const PipelineColorBlendAttachmentState* pAttachments_ = nullptr,
-                                       std::array<float,4> const& blendConstants_ = { { 0, 0, 0, 0 } } )
-      : flags( flags_ )
-      , logicOpEnable( logicOpEnable_ )
-      , logicOp( logicOp_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
-    {
-      memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
-    }
-
-    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
-    }
-
-    PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
-      return *this;
-    }
-    PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ )
-    {
-      logicOpEnable = logicOpEnable_;
-      return *this;
-    }
-
-    PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ )
-    {
-      logicOp = logicOp_;
-      return *this;
-    }
-
-    PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
-    {
-      attachmentCount = attachmentCount_;
-      return *this;
-    }
-
-    PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ )
-    {
-      pAttachments = pAttachments_;
-      return *this;
-    }
-
-    PipelineColorBlendStateCreateInfo& setBlendConstants( std::array<float,4> blendConstants_ )
-    {
-      memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
-      return *this;
-    }
-
-    operator VkPipelineColorBlendStateCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>(this);
-    }
-
-    operator VkPipelineColorBlendStateCreateInfo &()
-    {
-      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>(this);
-    }
-
-    bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( logicOpEnable == rhs.logicOpEnable )
-          && ( logicOp == rhs.logicOp )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
-    }
-
-    bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineColorBlendStateCreateFlags flags;
-    Bool32 logicOpEnable;
-    LogicOp logicOp;
-    uint32_t attachmentCount;
-    const PipelineColorBlendAttachmentState* pAttachments;
-    float blendConstants[4];
-  };
-  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
-
-  enum class FenceCreateFlagBits
-  {
-    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
-  };
-
-  using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
-
-  VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
-  {
-    return FenceCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
-  {
-    return ~( FenceCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<FenceCreateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
-    };
-  };
-
-  struct FenceCreateInfo
-  {
-    FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
-      : flags( flags_ )
-    {
-    }
-
-    FenceCreateInfo( VkFenceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
-    }
-
-    FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
-      return *this;
-    }
-    FenceCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    FenceCreateInfo& setFlags( FenceCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    operator VkFenceCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkFenceCreateInfo*>(this);
-    }
-
-    operator VkFenceCreateInfo &()
-    {
-      return *reinterpret_cast<VkFenceCreateInfo*>(this);
-    }
-
-    bool operator==( FenceCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( FenceCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eFenceCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    FenceCreateFlags flags;
-  };
-  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
-
-  enum class FormatFeatureFlagBits
-  {
-    eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
-    eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
-    eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
-    eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
-    eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
-    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
-    eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
-    eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
-    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
-    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
-    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
-    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
-    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
-    eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
-    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
-    eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
-    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
-    eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
-    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
-    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
-    eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
-    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
-    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
-    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
-    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
-    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
-    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
-    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
-    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT,
-    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
-    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
-    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
-    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT
-  };
-
-  using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
-
-  VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
-  {
-    return FormatFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
-  {
-    return ~( FormatFeatureFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<FormatFeatureFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT)
-    };
-  };
-
-  struct FormatProperties
-  {
-    operator VkFormatProperties const&() const
-    {
-      return *reinterpret_cast<const VkFormatProperties*>(this);
-    }
-
-    operator VkFormatProperties &()
-    {
-      return *reinterpret_cast<VkFormatProperties*>(this);
-    }
-
-    bool operator==( FormatProperties const& rhs ) const
-    {
-      return ( linearTilingFeatures == rhs.linearTilingFeatures )
-          && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
-          && ( bufferFeatures == rhs.bufferFeatures );
-    }
-
-    bool operator!=( FormatProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    FormatFeatureFlags linearTilingFeatures;
-    FormatFeatureFlags optimalTilingFeatures;
-    FormatFeatureFlags bufferFeatures;
-  };
-  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
-
-  struct FormatProperties2
-  {
-    operator VkFormatProperties2 const&() const
-    {
-      return *reinterpret_cast<const VkFormatProperties2*>(this);
-    }
-
-    operator VkFormatProperties2 &()
-    {
-      return *reinterpret_cast<VkFormatProperties2*>(this);
-    }
-
-    bool operator==( FormatProperties2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( formatProperties == rhs.formatProperties );
-    }
-
-    bool operator!=( FormatProperties2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eFormatProperties2;
-
-  public:
-    void* pNext = nullptr;
-    FormatProperties formatProperties;
-  };
-  static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
-
-  using FormatProperties2KHR = FormatProperties2;
-
-  struct DrmFormatModifierPropertiesEXT
-  {
-    operator VkDrmFormatModifierPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>(this);
-    }
-
-    operator VkDrmFormatModifierPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>(this);
-    }
-
-    bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const
-    {
-      return ( drmFormatModifier == rhs.drmFormatModifier )
-          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
-          && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
-    }
-
-    bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint64_t drmFormatModifier;
-    uint32_t drmFormatModifierPlaneCount;
-    FormatFeatureFlags drmFormatModifierTilingFeatures;
-  };
-  static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
-
-  struct DrmFormatModifierPropertiesListEXT
-  {
-    DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = 0,
-                                        DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = nullptr )
-      : drmFormatModifierCount( drmFormatModifierCount_ )
-      , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
-    {
-    }
-
-    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrmFormatModifierPropertiesListEXT ) );
-    }
-
-    DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DrmFormatModifierPropertiesListEXT ) );
-      return *this;
-    }
-    DrmFormatModifierPropertiesListEXT& setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DrmFormatModifierPropertiesListEXT& setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ )
-    {
-      drmFormatModifierCount = drmFormatModifierCount_;
-      return *this;
-    }
-
-    DrmFormatModifierPropertiesListEXT& setPDrmFormatModifierProperties( DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ )
-    {
-      pDrmFormatModifierProperties = pDrmFormatModifierProperties_;
-      return *this;
-    }
-
-    operator VkDrmFormatModifierPropertiesListEXT const&() const
-    {
-      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>(this);
-    }
-
-    operator VkDrmFormatModifierPropertiesListEXT &()
-    {
-      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>(this);
-    }
-
-    bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
-          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
-    }
-
-    bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t drmFormatModifierCount;
-    DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties;
-  };
-  static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
-
-  enum class QueryControlFlagBits
-  {
-    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
-  };
-
-  using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
-
-  VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
-  {
-    return QueryControlFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
-  {
-    return ~( QueryControlFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<QueryControlFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(QueryControlFlagBits::ePrecise)
-    };
-  };
-
-  enum class QueryResultFlagBits
-  {
-    e64 = VK_QUERY_RESULT_64_BIT,
-    eWait = VK_QUERY_RESULT_WAIT_BIT,
-    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
-    ePartial = VK_QUERY_RESULT_PARTIAL_BIT
-  };
-
-  using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
-
-  VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
-  {
-    return QueryResultFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
-  {
-    return ~( QueryResultFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<QueryResultFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
-    };
-  };
-
-  enum class CommandBufferUsageFlagBits
-  {
-    eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
-    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
-    eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
-  };
-
-  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
-
-  VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
-  {
-    return CommandBufferUsageFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
-  {
-    return ~( CommandBufferUsageFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<CommandBufferUsageFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
-    };
-  };
-
-  enum class QueryPipelineStatisticFlagBits
-  {
-    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
-    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
-    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
-    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
-    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
-    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
-    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
-    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
-    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
-    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
-    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
-  };
-
-  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
-
-  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
-  {
-    return QueryPipelineStatisticFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
-  {
-    return ~( QueryPipelineStatisticFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
-    };
-  };
-
-  struct CommandBufferInheritanceInfo
-  {
-    CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(),
-                                  uint32_t subpass_ = 0,
-                                  Framebuffer framebuffer_ = Framebuffer(),
-                                  Bool32 occlusionQueryEnable_ = 0,
-                                  QueryControlFlags queryFlags_ = QueryControlFlags(),
-                                  QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
-      : renderPass( renderPass_ )
-      , subpass( subpass_ )
-      , framebuffer( framebuffer_ )
-      , occlusionQueryEnable( occlusionQueryEnable_ )
-      , queryFlags( queryFlags_ )
-      , pipelineStatistics( pipelineStatistics_ )
-    {
-    }
-
-    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) );
-    }
-
-    CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) );
-      return *this;
-    }
-    CommandBufferInheritanceInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ )
-    {
-      renderPass = renderPass_;
-      return *this;
-    }
-
-    CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ )
-    {
-      subpass = subpass_;
-      return *this;
-    }
-
-    CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ )
-    {
-      framebuffer = framebuffer_;
-      return *this;
-    }
-
-    CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ )
-    {
-      occlusionQueryEnable = occlusionQueryEnable_;
-      return *this;
-    }
-
-    CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ )
-    {
-      queryFlags = queryFlags_;
-      return *this;
-    }
-
-    CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
-    {
-      pipelineStatistics = pipelineStatistics_;
-      return *this;
-    }
-
-    operator VkCommandBufferInheritanceInfo const&() const
-    {
-      return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>(this);
-    }
-
-    operator VkCommandBufferInheritanceInfo &()
-    {
-      return *reinterpret_cast<VkCommandBufferInheritanceInfo*>(this);
-    }
-
-    bool operator==( CommandBufferInheritanceInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( renderPass == rhs.renderPass )
-          && ( subpass == rhs.subpass )
-          && ( framebuffer == rhs.framebuffer )
-          && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
-          && ( queryFlags == rhs.queryFlags )
-          && ( pipelineStatistics == rhs.pipelineStatistics );
-    }
-
-    bool operator!=( CommandBufferInheritanceInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCommandBufferInheritanceInfo;
-
-  public:
-    const void* pNext = nullptr;
-    RenderPass renderPass;
-    uint32_t subpass;
-    Framebuffer framebuffer;
-    Bool32 occlusionQueryEnable;
-    QueryControlFlags queryFlags;
-    QueryPipelineStatisticFlags pipelineStatistics;
-  };
-  static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
-
-  struct CommandBufferBeginInfo
-  {
-    CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(),
-                            const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr )
-      : flags( flags_ )
-      , pInheritanceInfo( pInheritanceInfo_ )
-    {
-    }
-
-    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) );
-    }
-
-    CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) );
-      return *this;
-    }
-    CommandBufferBeginInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ )
-    {
-      pInheritanceInfo = pInheritanceInfo_;
-      return *this;
-    }
-
-    operator VkCommandBufferBeginInfo const&() const
-    {
-      return *reinterpret_cast<const VkCommandBufferBeginInfo*>(this);
-    }
-
-    operator VkCommandBufferBeginInfo &()
-    {
-      return *reinterpret_cast<VkCommandBufferBeginInfo*>(this);
-    }
-
-    bool operator==( CommandBufferBeginInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pInheritanceInfo == rhs.pInheritanceInfo );
-    }
-
-    bool operator!=( CommandBufferBeginInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCommandBufferBeginInfo;
-
-  public:
-    const void* pNext = nullptr;
-    CommandBufferUsageFlags flags;
-    const CommandBufferInheritanceInfo* pInheritanceInfo;
-  };
-  static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
-
-  struct QueryPoolCreateInfo
-  {
-    QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(),
-                         QueryType queryType_ = QueryType::eOcclusion,
-                         uint32_t queryCount_ = 0,
-                         QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
-      : flags( flags_ )
-      , queryType( queryType_ )
-      , queryCount( queryCount_ )
-      , pipelineStatistics( pipelineStatistics_ )
-    {
-    }
-
-    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) );
-    }
-
-    QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) );
-      return *this;
-    }
-    QueryPoolCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    QueryPoolCreateInfo& setQueryType( QueryType queryType_ )
-    {
-      queryType = queryType_;
-      return *this;
-    }
-
-    QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ )
-    {
-      queryCount = queryCount_;
-      return *this;
-    }
-
-    QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
-    {
-      pipelineStatistics = pipelineStatistics_;
-      return *this;
-    }
-
-    operator VkQueryPoolCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkQueryPoolCreateInfo*>(this);
-    }
-
-    operator VkQueryPoolCreateInfo &()
-    {
-      return *reinterpret_cast<VkQueryPoolCreateInfo*>(this);
-    }
-
-    bool operator==( QueryPoolCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queryType == rhs.queryType )
-          && ( queryCount == rhs.queryCount )
-          && ( pipelineStatistics == rhs.pipelineStatistics );
-    }
-
-    bool operator!=( QueryPoolCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eQueryPoolCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    QueryPoolCreateFlags flags;
-    QueryType queryType;
-    uint32_t queryCount;
-    QueryPipelineStatisticFlags pipelineStatistics;
-  };
-  static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
-
-  enum class ImageAspectFlagBits
-  {
-    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
-    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
-    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
-    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
-    ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
-    ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
-    ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
-    ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
-    ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
-    ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT,
-    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
-    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
-    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
-    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT
-  };
-
-  using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
-
-  VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
-  {
-    return ImageAspectFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
-  {
-    return ~( ImageAspectFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ImageAspectFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT)
-    };
-  };
-
-  struct ImageSubresource
-  {
-    ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(),
-                      uint32_t mipLevel_ = 0,
-                      uint32_t arrayLayer_ = 0 )
-      : aspectMask( aspectMask_ )
-      , mipLevel( mipLevel_ )
-      , arrayLayer( arrayLayer_ )
-    {
-    }
-
-    ImageSubresource( VkImageSubresource const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSubresource ) );
-    }
-
-    ImageSubresource& operator=( VkImageSubresource const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSubresource ) );
-      return *this;
-    }
-    ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ )
-    {
-      aspectMask = aspectMask_;
-      return *this;
-    }
-
-    ImageSubresource& setMipLevel( uint32_t mipLevel_ )
-    {
-      mipLevel = mipLevel_;
-      return *this;
-    }
-
-    ImageSubresource& setArrayLayer( uint32_t arrayLayer_ )
-    {
-      arrayLayer = arrayLayer_;
-      return *this;
-    }
-
-    operator VkImageSubresource const&() const
-    {
-      return *reinterpret_cast<const VkImageSubresource*>(this);
-    }
-
-    operator VkImageSubresource &()
-    {
-      return *reinterpret_cast<VkImageSubresource*>(this);
-    }
-
-    bool operator==( ImageSubresource const& rhs ) const
-    {
-      return ( aspectMask == rhs.aspectMask )
-          && ( mipLevel == rhs.mipLevel )
-          && ( arrayLayer == rhs.arrayLayer );
-    }
-
-    bool operator!=( ImageSubresource const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageAspectFlags aspectMask;
-    uint32_t mipLevel;
-    uint32_t arrayLayer;
-  };
-  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
-
-  struct ImageSubresourceLayers
-  {
-    ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(),
-                            uint32_t mipLevel_ = 0,
-                            uint32_t baseArrayLayer_ = 0,
-                            uint32_t layerCount_ = 0 )
-      : aspectMask( aspectMask_ )
-      , mipLevel( mipLevel_ )
-      , baseArrayLayer( baseArrayLayer_ )
-      , layerCount( layerCount_ )
-    {
-    }
-
-    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) );
-    }
-
-    ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) );
-      return *this;
-    }
-    ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ )
-    {
-      aspectMask = aspectMask_;
-      return *this;
-    }
-
-    ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ )
-    {
-      mipLevel = mipLevel_;
-      return *this;
-    }
-
-    ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ )
-    {
-      baseArrayLayer = baseArrayLayer_;
-      return *this;
-    }
-
-    ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ )
-    {
-      layerCount = layerCount_;
-      return *this;
-    }
-
-    operator VkImageSubresourceLayers const&() const
-    {
-      return *reinterpret_cast<const VkImageSubresourceLayers*>(this);
-    }
-
-    operator VkImageSubresourceLayers &()
-    {
-      return *reinterpret_cast<VkImageSubresourceLayers*>(this);
-    }
-
-    bool operator==( ImageSubresourceLayers const& rhs ) const
-    {
-      return ( aspectMask == rhs.aspectMask )
-          && ( mipLevel == rhs.mipLevel )
-          && ( baseArrayLayer == rhs.baseArrayLayer )
-          && ( layerCount == rhs.layerCount );
-    }
-
-    bool operator!=( ImageSubresourceLayers const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageAspectFlags aspectMask;
-    uint32_t mipLevel;
-    uint32_t baseArrayLayer;
-    uint32_t layerCount;
-  };
-  static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
-
-  struct ImageSubresourceRange
-  {
-    ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(),
-                           uint32_t baseMipLevel_ = 0,
-                           uint32_t levelCount_ = 0,
-                           uint32_t baseArrayLayer_ = 0,
-                           uint32_t layerCount_ = 0 )
-      : aspectMask( aspectMask_ )
-      , baseMipLevel( baseMipLevel_ )
-      , levelCount( levelCount_ )
-      , baseArrayLayer( baseArrayLayer_ )
-      , layerCount( layerCount_ )
-    {
-    }
-
-    ImageSubresourceRange( VkImageSubresourceRange const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSubresourceRange ) );
-    }
-
-    ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageSubresourceRange ) );
-      return *this;
-    }
-    ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ )
-    {
-      aspectMask = aspectMask_;
-      return *this;
-    }
-
-    ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ )
-    {
-      baseMipLevel = baseMipLevel_;
-      return *this;
-    }
-
-    ImageSubresourceRange& setLevelCount( uint32_t levelCount_ )
-    {
-      levelCount = levelCount_;
-      return *this;
-    }
-
-    ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ )
-    {
-      baseArrayLayer = baseArrayLayer_;
-      return *this;
-    }
-
-    ImageSubresourceRange& setLayerCount( uint32_t layerCount_ )
-    {
-      layerCount = layerCount_;
-      return *this;
-    }
-
-    operator VkImageSubresourceRange const&() const
-    {
-      return *reinterpret_cast<const VkImageSubresourceRange*>(this);
-    }
-
-    operator VkImageSubresourceRange &()
-    {
-      return *reinterpret_cast<VkImageSubresourceRange*>(this);
-    }
-
-    bool operator==( ImageSubresourceRange const& rhs ) const
-    {
-      return ( aspectMask == rhs.aspectMask )
-          && ( baseMipLevel == rhs.baseMipLevel )
-          && ( levelCount == rhs.levelCount )
-          && ( baseArrayLayer == rhs.baseArrayLayer )
-          && ( layerCount == rhs.layerCount );
-    }
-
-    bool operator!=( ImageSubresourceRange const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageAspectFlags aspectMask;
-    uint32_t baseMipLevel;
-    uint32_t levelCount;
-    uint32_t baseArrayLayer;
-    uint32_t layerCount;
-  };
-  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
-
-  struct ImageMemoryBarrier
-  {
-    ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(),
-                        AccessFlags dstAccessMask_ = AccessFlags(),
-                        ImageLayout oldLayout_ = ImageLayout::eUndefined,
-                        ImageLayout newLayout_ = ImageLayout::eUndefined,
-                        uint32_t srcQueueFamilyIndex_ = 0,
-                        uint32_t dstQueueFamilyIndex_ = 0,
-                        Image image_ = Image(),
-                        ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
-      : srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-      , oldLayout( oldLayout_ )
-      , newLayout( newLayout_ )
-      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
-      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
-      , image( image_ )
-      , subresourceRange( subresourceRange_ )
-    {
-    }
-
-    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) );
-    }
-
-    ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) );
-      return *this;
-    }
-    ImageMemoryBarrier& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ )
-    {
-      oldLayout = oldLayout_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ )
-    {
-      newLayout = newLayout_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
-    {
-      srcQueueFamilyIndex = srcQueueFamilyIndex_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
-    {
-      dstQueueFamilyIndex = dstQueueFamilyIndex_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setImage( Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
-    {
-      subresourceRange = subresourceRange_;
-      return *this;
-    }
-
-    operator VkImageMemoryBarrier const&() const
-    {
-      return *reinterpret_cast<const VkImageMemoryBarrier*>(this);
-    }
-
-    operator VkImageMemoryBarrier &()
-    {
-      return *reinterpret_cast<VkImageMemoryBarrier*>(this);
-    }
-
-    bool operator==( ImageMemoryBarrier const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( oldLayout == rhs.oldLayout )
-          && ( newLayout == rhs.newLayout )
-          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
-          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
-          && ( image == rhs.image )
-          && ( subresourceRange == rhs.subresourceRange );
-    }
-
-    bool operator!=( ImageMemoryBarrier const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageMemoryBarrier;
-
-  public:
-    const void* pNext = nullptr;
-    AccessFlags srcAccessMask;
-    AccessFlags dstAccessMask;
-    ImageLayout oldLayout;
-    ImageLayout newLayout;
-    uint32_t srcQueueFamilyIndex;
-    uint32_t dstQueueFamilyIndex;
-    Image image;
-    ImageSubresourceRange subresourceRange;
-  };
-  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
-
-  struct ImageViewCreateInfo
-  {
-    ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(),
-                         Image image_ = Image(),
-                         ImageViewType viewType_ = ImageViewType::e1D,
-                         Format format_ = Format::eUndefined,
-                         ComponentMapping components_ = ComponentMapping(),
-                         ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
-      : flags( flags_ )
-      , image( image_ )
-      , viewType( viewType_ )
-      , format( format_ )
-      , components( components_ )
-      , subresourceRange( subresourceRange_ )
-    {
-    }
-
-    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) );
-    }
-
-    ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) );
-      return *this;
-    }
-    ImageViewCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ImageViewCreateInfo& setImage( Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    ImageViewCreateInfo& setViewType( ImageViewType viewType_ )
-    {
-      viewType = viewType_;
-      return *this;
-    }
-
-    ImageViewCreateInfo& setFormat( Format format_ )
-    {
-      format = format_;
-      return *this;
-    }
-
-    ImageViewCreateInfo& setComponents( ComponentMapping components_ )
-    {
-      components = components_;
-      return *this;
-    }
-
-    ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
-    {
-      subresourceRange = subresourceRange_;
-      return *this;
-    }
-
-    operator VkImageViewCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkImageViewCreateInfo*>(this);
-    }
-
-    operator VkImageViewCreateInfo &()
-    {
-      return *reinterpret_cast<VkImageViewCreateInfo*>(this);
-    }
-
-    bool operator==( ImageViewCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( image == rhs.image )
-          && ( viewType == rhs.viewType )
-          && ( format == rhs.format )
-          && ( components == rhs.components )
-          && ( subresourceRange == rhs.subresourceRange );
-    }
-
-    bool operator!=( ImageViewCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageViewCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ImageViewCreateFlags flags;
-    Image image;
-    ImageViewType viewType;
-    Format format;
-    ComponentMapping components;
-    ImageSubresourceRange subresourceRange;
-  };
-  static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
-
-  struct ImageCopy
-  {
-    ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(),
-               Offset3D srcOffset_ = Offset3D(),
-               ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(),
-               Offset3D dstOffset_ = Offset3D(),
-               Extent3D extent_ = Extent3D() )
-      : srcSubresource( srcSubresource_ )
-      , srcOffset( srcOffset_ )
-      , dstSubresource( dstSubresource_ )
-      , dstOffset( dstOffset_ )
-      , extent( extent_ )
-    {
-    }
-
-    ImageCopy( VkImageCopy const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageCopy ) );
-    }
-
-    ImageCopy& operator=( VkImageCopy const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageCopy ) );
-      return *this;
-    }
-    ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
-    {
-      srcSubresource = srcSubresource_;
-      return *this;
-    }
-
-    ImageCopy& setSrcOffset( Offset3D srcOffset_ )
-    {
-      srcOffset = srcOffset_;
-      return *this;
-    }
-
-    ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
-    {
-      dstSubresource = dstSubresource_;
-      return *this;
-    }
-
-    ImageCopy& setDstOffset( Offset3D dstOffset_ )
-    {
-      dstOffset = dstOffset_;
-      return *this;
-    }
-
-    ImageCopy& setExtent( Extent3D extent_ )
-    {
-      extent = extent_;
-      return *this;
-    }
-
-    operator VkImageCopy const&() const
-    {
-      return *reinterpret_cast<const VkImageCopy*>(this);
-    }
-
-    operator VkImageCopy &()
-    {
-      return *reinterpret_cast<VkImageCopy*>(this);
-    }
-
-    bool operator==( ImageCopy const& rhs ) const
-    {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( srcOffset == rhs.srcOffset )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( dstOffset == rhs.dstOffset )
-          && ( extent == rhs.extent );
-    }
-
-    bool operator!=( ImageCopy const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageSubresourceLayers srcSubresource;
-    Offset3D srcOffset;
-    ImageSubresourceLayers dstSubresource;
-    Offset3D dstOffset;
-    Extent3D extent;
-  };
-  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
-
-  struct ImageBlit
-  {
-    ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(),
-               std::array<Offset3D,2> const& srcOffsets_ = { { Offset3D(), Offset3D() } },
-               ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(),
-               std::array<Offset3D,2> const& dstOffsets_ = { { Offset3D(), Offset3D() } } )
-      : srcSubresource( srcSubresource_ )
-      , dstSubresource( dstSubresource_ )
-    {
-      memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
-      memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
-    }
-
-    ImageBlit( VkImageBlit const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageBlit ) );
-    }
-
-    ImageBlit& operator=( VkImageBlit const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageBlit ) );
-      return *this;
-    }
-    ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
-    {
-      srcSubresource = srcSubresource_;
-      return *this;
-    }
-
-    ImageBlit& setSrcOffsets( std::array<Offset3D,2> srcOffsets_ )
-    {
-      memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
-      return *this;
-    }
-
-    ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
-    {
-      dstSubresource = dstSubresource_;
-      return *this;
-    }
-
-    ImageBlit& setDstOffsets( std::array<Offset3D,2> dstOffsets_ )
-    {
-      memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
-      return *this;
-    }
-
-    operator VkImageBlit const&() const
-    {
-      return *reinterpret_cast<const VkImageBlit*>(this);
-    }
-
-    operator VkImageBlit &()
-    {
-      return *reinterpret_cast<VkImageBlit*>(this);
-    }
-
-    bool operator==( ImageBlit const& rhs ) const
-    {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 );
-    }
-
-    bool operator!=( ImageBlit const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageSubresourceLayers srcSubresource;
-    Offset3D srcOffsets[2];
-    ImageSubresourceLayers dstSubresource;
-    Offset3D dstOffsets[2];
-  };
-  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
-
-  struct BufferImageCopy
-  {
-    BufferImageCopy( DeviceSize bufferOffset_ = 0,
-                     uint32_t bufferRowLength_ = 0,
-                     uint32_t bufferImageHeight_ = 0,
-                     ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(),
-                     Offset3D imageOffset_ = Offset3D(),
-                     Extent3D imageExtent_ = Extent3D() )
-      : bufferOffset( bufferOffset_ )
-      , bufferRowLength( bufferRowLength_ )
-      , bufferImageHeight( bufferImageHeight_ )
-      , imageSubresource( imageSubresource_ )
-      , imageOffset( imageOffset_ )
-      , imageExtent( imageExtent_ )
-    {
-    }
-
-    BufferImageCopy( VkBufferImageCopy const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferImageCopy ) );
-    }
-
-    BufferImageCopy& operator=( VkBufferImageCopy const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BufferImageCopy ) );
-      return *this;
-    }
-    BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ )
-    {
-      bufferOffset = bufferOffset_;
-      return *this;
-    }
-
-    BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ )
-    {
-      bufferRowLength = bufferRowLength_;
-      return *this;
-    }
-
-    BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ )
-    {
-      bufferImageHeight = bufferImageHeight_;
-      return *this;
-    }
-
-    BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ )
-    {
-      imageSubresource = imageSubresource_;
-      return *this;
-    }
-
-    BufferImageCopy& setImageOffset( Offset3D imageOffset_ )
-    {
-      imageOffset = imageOffset_;
-      return *this;
-    }
-
-    BufferImageCopy& setImageExtent( Extent3D imageExtent_ )
-    {
-      imageExtent = imageExtent_;
-      return *this;
-    }
-
-    operator VkBufferImageCopy const&() const
-    {
-      return *reinterpret_cast<const VkBufferImageCopy*>(this);
-    }
-
-    operator VkBufferImageCopy &()
-    {
-      return *reinterpret_cast<VkBufferImageCopy*>(this);
-    }
-
-    bool operator==( BufferImageCopy const& rhs ) const
-    {
-      return ( bufferOffset == rhs.bufferOffset )
-          && ( bufferRowLength == rhs.bufferRowLength )
-          && ( bufferImageHeight == rhs.bufferImageHeight )
-          && ( imageSubresource == rhs.imageSubresource )
-          && ( imageOffset == rhs.imageOffset )
-          && ( imageExtent == rhs.imageExtent );
-    }
-
-    bool operator!=( BufferImageCopy const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DeviceSize bufferOffset;
-    uint32_t bufferRowLength;
-    uint32_t bufferImageHeight;
-    ImageSubresourceLayers imageSubresource;
-    Offset3D imageOffset;
-    Extent3D imageExtent;
-  };
-  static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
-
-  struct ImageResolve
-  {
-    ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(),
-                  Offset3D srcOffset_ = Offset3D(),
-                  ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(),
-                  Offset3D dstOffset_ = Offset3D(),
-                  Extent3D extent_ = Extent3D() )
-      : srcSubresource( srcSubresource_ )
-      , srcOffset( srcOffset_ )
-      , dstSubresource( dstSubresource_ )
-      , dstOffset( dstOffset_ )
-      , extent( extent_ )
-    {
-    }
-
-    ImageResolve( VkImageResolve const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageResolve ) );
-    }
-
-    ImageResolve& operator=( VkImageResolve const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageResolve ) );
-      return *this;
-    }
-    ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
-    {
-      srcSubresource = srcSubresource_;
-      return *this;
-    }
-
-    ImageResolve& setSrcOffset( Offset3D srcOffset_ )
-    {
-      srcOffset = srcOffset_;
-      return *this;
-    }
-
-    ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
-    {
-      dstSubresource = dstSubresource_;
-      return *this;
-    }
-
-    ImageResolve& setDstOffset( Offset3D dstOffset_ )
-    {
-      dstOffset = dstOffset_;
-      return *this;
-    }
-
-    ImageResolve& setExtent( Extent3D extent_ )
-    {
-      extent = extent_;
-      return *this;
-    }
-
-    operator VkImageResolve const&() const
-    {
-      return *reinterpret_cast<const VkImageResolve*>(this);
-    }
-
-    operator VkImageResolve &()
-    {
-      return *reinterpret_cast<VkImageResolve*>(this);
-    }
-
-    bool operator==( ImageResolve const& rhs ) const
-    {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( srcOffset == rhs.srcOffset )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( dstOffset == rhs.dstOffset )
-          && ( extent == rhs.extent );
-    }
-
-    bool operator!=( ImageResolve const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageSubresourceLayers srcSubresource;
-    Offset3D srcOffset;
-    ImageSubresourceLayers dstSubresource;
-    Offset3D dstOffset;
-    Extent3D extent;
-  };
-  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
-
-  struct ClearAttachment
-  {
-    ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(),
-                     uint32_t colorAttachment_ = 0,
-                     ClearValue clearValue_ = ClearValue() )
-      : aspectMask( aspectMask_ )
-      , colorAttachment( colorAttachment_ )
-      , clearValue( clearValue_ )
-    {
-    }
-
-    ClearAttachment( VkClearAttachment const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ClearAttachment ) );
-    }
-
-    ClearAttachment& operator=( VkClearAttachment const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ClearAttachment ) );
-      return *this;
-    }
-    ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ )
-    {
-      aspectMask = aspectMask_;
-      return *this;
-    }
-
-    ClearAttachment& setColorAttachment( uint32_t colorAttachment_ )
-    {
-      colorAttachment = colorAttachment_;
-      return *this;
-    }
-
-    ClearAttachment& setClearValue( ClearValue clearValue_ )
-    {
-      clearValue = clearValue_;
-      return *this;
-    }
-
-    operator VkClearAttachment const&() const
-    {
-      return *reinterpret_cast<const VkClearAttachment*>(this);
-    }
-
-    operator VkClearAttachment &()
-    {
-      return *reinterpret_cast<VkClearAttachment*>(this);
-    }
-
-    ImageAspectFlags aspectMask;
-    uint32_t colorAttachment;
-    ClearValue clearValue;
-  };
-  static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
-
-  struct InputAttachmentAspectReference
-  {
-    InputAttachmentAspectReference( uint32_t subpass_ = 0,
-                                    uint32_t inputAttachmentIndex_ = 0,
-                                    ImageAspectFlags aspectMask_ = ImageAspectFlags() )
-      : subpass( subpass_ )
-      , inputAttachmentIndex( inputAttachmentIndex_ )
-      , aspectMask( aspectMask_ )
-    {
-    }
-
-    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) );
-    }
-
-    InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) );
-      return *this;
-    }
-    InputAttachmentAspectReference& setSubpass( uint32_t subpass_ )
-    {
-      subpass = subpass_;
-      return *this;
-    }
-
-    InputAttachmentAspectReference& setInputAttachmentIndex( uint32_t inputAttachmentIndex_ )
-    {
-      inputAttachmentIndex = inputAttachmentIndex_;
-      return *this;
-    }
-
-    InputAttachmentAspectReference& setAspectMask( ImageAspectFlags aspectMask_ )
-    {
-      aspectMask = aspectMask_;
-      return *this;
-    }
-
-    operator VkInputAttachmentAspectReference const&() const
-    {
-      return *reinterpret_cast<const VkInputAttachmentAspectReference*>(this);
-    }
-
-    operator VkInputAttachmentAspectReference &()
-    {
-      return *reinterpret_cast<VkInputAttachmentAspectReference*>(this);
-    }
-
-    bool operator==( InputAttachmentAspectReference const& rhs ) const
-    {
-      return ( subpass == rhs.subpass )
-          && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
-          && ( aspectMask == rhs.aspectMask );
-    }
-
-    bool operator!=( InputAttachmentAspectReference const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t subpass;
-    uint32_t inputAttachmentIndex;
-    ImageAspectFlags aspectMask;
-  };
-  static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
-
-  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
-
-  struct RenderPassInputAttachmentAspectCreateInfo
-  {
-    RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0,
-                                               const InputAttachmentAspectReference* pAspectReferences_ = nullptr )
-      : aspectReferenceCount( aspectReferenceCount_ )
-      , pAspectReferences( pAspectReferences_ )
-    {
-    }
-
-    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) );
-    }
-
-    RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) );
-      return *this;
-    }
-    RenderPassInputAttachmentAspectCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RenderPassInputAttachmentAspectCreateInfo& setAspectReferenceCount( uint32_t aspectReferenceCount_ )
-    {
-      aspectReferenceCount = aspectReferenceCount_;
-      return *this;
-    }
-
-    RenderPassInputAttachmentAspectCreateInfo& setPAspectReferences( const InputAttachmentAspectReference* pAspectReferences_ )
-    {
-      pAspectReferences = pAspectReferences_;
-      return *this;
-    }
-
-    operator VkRenderPassInputAttachmentAspectCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>(this);
-    }
-
-    operator VkRenderPassInputAttachmentAspectCreateInfo &()
-    {
-      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>(this);
-    }
-
-    bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( aspectReferenceCount == rhs.aspectReferenceCount )
-          && ( pAspectReferences == rhs.pAspectReferences );
-    }
-
-    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t aspectReferenceCount;
-    const InputAttachmentAspectReference* pAspectReferences;
-  };
-  static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
-
-  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
-
-  struct BindImagePlaneMemoryInfo
-  {
-    BindImagePlaneMemoryInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor )
-      : planeAspect( planeAspect_ )
-    {
-    }
-
-    BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) );
-    }
-
-    BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) );
-      return *this;
-    }
-    BindImagePlaneMemoryInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    BindImagePlaneMemoryInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ )
-    {
-      planeAspect = planeAspect_;
-      return *this;
-    }
-
-    operator VkBindImagePlaneMemoryInfo const&() const
-    {
-      return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>(this);
-    }
-
-    operator VkBindImagePlaneMemoryInfo &()
-    {
-      return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>(this);
-    }
-
-    bool operator==( BindImagePlaneMemoryInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( planeAspect == rhs.planeAspect );
-    }
-
-    bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ImageAspectFlagBits planeAspect;
-  };
-  static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
-
-  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
-
-  struct ImagePlaneMemoryRequirementsInfo
-  {
-    ImagePlaneMemoryRequirementsInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor )
-      : planeAspect( planeAspect_ )
-    {
-    }
-
-    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) );
-    }
-
-    ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) );
-      return *this;
-    }
-    ImagePlaneMemoryRequirementsInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImagePlaneMemoryRequirementsInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ )
-    {
-      planeAspect = planeAspect_;
-      return *this;
-    }
-
-    operator VkImagePlaneMemoryRequirementsInfo const&() const
-    {
-      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>(this);
-    }
-
-    operator VkImagePlaneMemoryRequirementsInfo &()
-    {
-      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>(this);
-    }
-
-    bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( planeAspect == rhs.planeAspect );
-    }
-
-    bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ImageAspectFlagBits planeAspect;
-  };
-  static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
-
-  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
-
-  struct AttachmentReference2KHR
-  {
-    AttachmentReference2KHR( uint32_t attachment_ = 0,
-                             ImageLayout layout_ = ImageLayout::eUndefined,
-                             ImageAspectFlags aspectMask_ = ImageAspectFlags() )
-      : attachment( attachment_ )
-      , layout( layout_ )
-      , aspectMask( aspectMask_ )
-    {
-    }
-
-    AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentReference2KHR ) );
-    }
-
-    AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentReference2KHR ) );
-      return *this;
-    }
-    AttachmentReference2KHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    AttachmentReference2KHR& setAttachment( uint32_t attachment_ )
-    {
-      attachment = attachment_;
-      return *this;
-    }
-
-    AttachmentReference2KHR& setLayout( ImageLayout layout_ )
-    {
-      layout = layout_;
-      return *this;
-    }
-
-    AttachmentReference2KHR& setAspectMask( ImageAspectFlags aspectMask_ )
-    {
-      aspectMask = aspectMask_;
-      return *this;
-    }
-
-    operator VkAttachmentReference2KHR const&() const
-    {
-      return *reinterpret_cast<const VkAttachmentReference2KHR*>(this);
-    }
-
-    operator VkAttachmentReference2KHR &()
-    {
-      return *reinterpret_cast<VkAttachmentReference2KHR*>(this);
-    }
-
-    bool operator==( AttachmentReference2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( attachment == rhs.attachment )
-          && ( layout == rhs.layout )
-          && ( aspectMask == rhs.aspectMask );
-    }
-
-    bool operator!=( AttachmentReference2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAttachmentReference2KHR;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t attachment;
-    ImageLayout layout;
-    ImageAspectFlags aspectMask;
-  };
-  static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "struct and wrapper have different size!" );
-
-  enum class SparseImageFormatFlagBits
-  {
-    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
-    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
-    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
-  };
-
-  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
-
-  VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
-  {
-    return SparseImageFormatFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
-  {
-    return ~( SparseImageFormatFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<SparseImageFormatFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
-    };
-  };
-
-  struct SparseImageFormatProperties
-  {
-    operator VkSparseImageFormatProperties const&() const
-    {
-      return *reinterpret_cast<const VkSparseImageFormatProperties*>(this);
-    }
-
-    operator VkSparseImageFormatProperties &()
-    {
-      return *reinterpret_cast<VkSparseImageFormatProperties*>(this);
-    }
-
-    bool operator==( SparseImageFormatProperties const& rhs ) const
-    {
-      return ( aspectMask == rhs.aspectMask )
-          && ( imageGranularity == rhs.imageGranularity )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( SparseImageFormatProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageAspectFlags aspectMask;
-    Extent3D imageGranularity;
-    SparseImageFormatFlags flags;
-  };
-  static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
-
-  struct SparseImageMemoryRequirements
-  {
-    operator VkSparseImageMemoryRequirements const&() const
-    {
-      return *reinterpret_cast<const VkSparseImageMemoryRequirements*>(this);
-    }
-
-    operator VkSparseImageMemoryRequirements &()
-    {
-      return *reinterpret_cast<VkSparseImageMemoryRequirements*>(this);
-    }
-
-    bool operator==( SparseImageMemoryRequirements const& rhs ) const
-    {
-      return ( formatProperties == rhs.formatProperties )
-          && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
-          && ( imageMipTailSize == rhs.imageMipTailSize )
-          && ( imageMipTailOffset == rhs.imageMipTailOffset )
-          && ( imageMipTailStride == rhs.imageMipTailStride );
-    }
-
-    bool operator!=( SparseImageMemoryRequirements const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    SparseImageFormatProperties formatProperties;
-    uint32_t imageMipTailFirstLod;
-    DeviceSize imageMipTailSize;
-    DeviceSize imageMipTailOffset;
-    DeviceSize imageMipTailStride;
-  };
-  static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
-
-  struct SparseImageFormatProperties2
-  {
-    operator VkSparseImageFormatProperties2 const&() const
-    {
-      return *reinterpret_cast<const VkSparseImageFormatProperties2*>(this);
-    }
-
-    operator VkSparseImageFormatProperties2 &()
-    {
-      return *reinterpret_cast<VkSparseImageFormatProperties2*>(this);
-    }
-
-    bool operator==( SparseImageFormatProperties2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( properties == rhs.properties );
-    }
-
-    bool operator!=( SparseImageFormatProperties2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSparseImageFormatProperties2;
-
-  public:
-    void* pNext = nullptr;
-    SparseImageFormatProperties properties;
-  };
-  static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
-
-  using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
-
-  struct SparseImageMemoryRequirements2
-  {
-    operator VkSparseImageMemoryRequirements2 const&() const
-    {
-      return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>(this);
-    }
-
-    operator VkSparseImageMemoryRequirements2 &()
-    {
-      return *reinterpret_cast<VkSparseImageMemoryRequirements2*>(this);
-    }
-
-    bool operator==( SparseImageMemoryRequirements2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryRequirements == rhs.memoryRequirements );
-    }
-
-    bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSparseImageMemoryRequirements2;
-
-  public:
-    void* pNext = nullptr;
-    SparseImageMemoryRequirements memoryRequirements;
-  };
-  static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
-
-  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
-
-  enum class SparseMemoryBindFlagBits
-  {
-    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
-  };
-
-  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
-
-  VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
-  {
-    return SparseMemoryBindFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
-  {
-    return ~( SparseMemoryBindFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<SparseMemoryBindFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
-    };
-  };
-
-  struct SparseMemoryBind
-  {
-    SparseMemoryBind( DeviceSize resourceOffset_ = 0,
-                      DeviceSize size_ = 0,
-                      DeviceMemory memory_ = DeviceMemory(),
-                      DeviceSize memoryOffset_ = 0,
-                      SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
-      : resourceOffset( resourceOffset_ )
-      , size( size_ )
-      , memory( memory_ )
-      , memoryOffset( memoryOffset_ )
-      , flags( flags_ )
-    {
-    }
-
-    SparseMemoryBind( VkSparseMemoryBind const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseMemoryBind ) );
-    }
-
-    SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseMemoryBind ) );
-      return *this;
-    }
-    SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ )
-    {
-      resourceOffset = resourceOffset_;
-      return *this;
-    }
-
-    SparseMemoryBind& setSize( DeviceSize size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    SparseMemoryBind& setMemory( DeviceMemory memory_ )
+    BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
     {
       memory = memory_;
       return *this;
     }
 
-    SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
+    BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
     {
       memoryOffset = memoryOffset_;
       return *this;
     }
 
-    SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
+
+    operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
+    }
+
+    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindBufferMemoryInfo const& ) const = default;
+#else
+    bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+    }
+
+    bool operator!=( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+
+  };
+  static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
+  {
+    using Type = BindBufferMemoryInfo;
+  };
+  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
+
+  struct Offset2D
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
+      return *this;
+    }
+
+    Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Offset2D ) );
+      return *this;
+    }
+
+    Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+
+    operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset2D*>( this );
+    }
+
+    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset2D*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Offset2D const& ) const = default;
+#else
+    bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    int32_t x = {};
+    int32_t y = {};
+
+  };
+  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );
+
+  struct Rect2D
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : offset( offset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
+      return *this;
+    }
+
+    Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Rect2D ) );
+      return *this;
+    }
+
+    Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+
+    operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRect2D*>( this );
+    }
+
+    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRect2D*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Rect2D const& ) const = default;
+#else
+    bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+
+  };
+  static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Rect2D>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindImageMemoryDeviceGroupInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ), splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ), pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {} )
+    : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ), splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) ), pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) );
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindImageMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
+      pDeviceIndices = deviceIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
+      pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
+    }
+
+    operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default;
+#else
+    bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices )
+          && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
+          && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
+    }
+
+    bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
+    const void* pNext = {};
+    uint32_t deviceIndexCount = {};
+    const uint32_t* pDeviceIndices = {};
+    uint32_t splitInstanceBindRegionCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {};
+
+  };
+  static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
+  {
+    using Type = BindImageMemoryDeviceGroupInfo;
+  };
+  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+
+  class Image
+  {
+  public:
+    using CType = VkImage;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT
+      : m_image(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_image(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT
+      : m_image( image )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = image;
+      return *this;
+    }
+#endif
+
+    Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Image const& ) const = default;
+#else
+    bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == rhs.m_image;
+    }
+
+    bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != rhs.m_image;
+    }
+
+    bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image < rhs.m_image;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImage m_image;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImage>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Image>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct BindImageMemoryInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
+      return *this;
+    }
+
+    BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImageMemoryInfo ) );
+      return *this;
+    }
+
+    BindImageMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+
+    operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
+    }
+
+    operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindImageMemoryInfo const& ) const = default;
+#else
+    bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+    }
+
+    bool operator!=( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+
+  };
+  static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
+  {
+    using Type = BindImageMemoryInfo;
+  };
+  using BindImageMemoryInfoKHR = BindImageMemoryInfo;
+
+  struct BindImageMemorySwapchainInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : swapchain( swapchain_ ), imageIndex( imageIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) );
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageIndex = imageIndex_;
+      return *this;
+    }
+
+
+    operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
+    }
+
+    operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default;
+#else
+    bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( imageIndex == rhs.imageIndex );
+    }
+
+    bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+    uint32_t imageIndex = {};
+
+  };
+  static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
+  {
+    using Type = BindImageMemorySwapchainInfoKHR;
+  };
+
+  struct BindImagePlaneMemoryInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
+    : planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
+      return *this;
+    }
+
+    BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindImagePlaneMemoryInfo ) );
+      return *this;
+    }
+
+    BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+
+
+    operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
+    }
+
+    operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default;
+#else
+    bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( planeAspect == rhs.planeAspect );
+    }
+
+    bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+
+  };
+  static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
+  {
+    using Type = BindImagePlaneMemoryInfo;
+  };
+  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+
+  struct BindIndexBufferIndirectCommandNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
+    : bufferAddress( bufferAddress_ ), size( size_ ), indexType( indexType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindIndexBufferIndirectCommandNV ) );
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferAddress = bufferAddress_;
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+
+    operator VkBindIndexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
+    }
+
+    operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindIndexBufferIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( bufferAddress == rhs.bufferAddress )
+          && ( size == rhs.size )
+          && ( indexType == rhs.indexType );
+    }
+
+    bool operator!=( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
+    uint32_t size = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+
+  };
+  static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindShaderGroupIndirectCommandNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : groupIndex( groupIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindShaderGroupIndirectCommandNV ) );
+      return *this;
+    }
+
+    BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupIndex = groupIndex_;
+      return *this;
+    }
+
+
+    operator VkBindShaderGroupIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+    operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindShaderGroupIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( groupIndex == rhs.groupIndex );
+    }
+
+    bool operator!=( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t groupIndex = {};
+
+  };
+  static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseMemoryBind
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : resourceOffset( resourceOffset_ ), size( size_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
+      return *this;
+    }
+
+    SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseMemoryBind ) );
+      return *this;
+    }
+
+    SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceOffset = resourceOffset_;
+      return *this;
+    }
+
+    SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    operator VkSparseMemoryBind const&() const
+
+    operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSparseMemoryBind*>(this);
+      return *reinterpret_cast<const VkSparseMemoryBind*>( this );
     }
 
-    operator VkSparseMemoryBind &()
+    operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSparseMemoryBind*>(this);
+      return *reinterpret_cast<VkSparseMemoryBind*>( this );
     }
 
-    bool operator==( SparseMemoryBind const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseMemoryBind const& ) const = default;
+#else
+    bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( resourceOffset == rhs.resourceOffset )
           && ( size == rhs.size )
@@ -26220,93 +20615,578 @@
           && ( flags == rhs.flags );
     }
 
-    bool operator!=( SparseMemoryBind const& rhs ) const
+    bool operator!=( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    DeviceSize resourceOffset;
-    DeviceSize size;
-    DeviceMemory memory;
-    DeviceSize memoryOffset;
-    SparseMemoryBindFlags flags;
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
+
   };
   static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseBufferMemoryBindInfo
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+    : buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseBufferMemoryBindInfo ) );
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SparseBufferMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = static_cast<uint32_t>( binds_.size() );
+      pBinds = binds_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default;
+#else
+    bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    uint32_t bindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
+
+  };
+  static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageOpaqueMemoryBindInfo
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+    : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SparseImageOpaqueMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = static_cast<uint32_t>( binds_.size() );
+      pBinds = binds_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default;
+#else
+    bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( image == rhs.image )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    uint32_t bindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
+
+  };
+  static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSubresource
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), arrayLayer( arrayLayer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
+      return *this;
+    }
+
+    ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSubresource ) );
+      return *this;
+    }
+
+    ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayer = arrayLayer_;
+      return *this;
+    }
+
+
+    operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresource*>( this );
+    }
+
+    operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresource*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSubresource const& ) const = default;
+#else
+    bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( arrayLayer == rhs.arrayLayer );
+    }
+
+    bool operator!=( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t mipLevel = {};
+    uint32_t arrayLayer = {};
+
+  };
+  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );
+
+  struct Offset3D
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    explicit Offset3D( Offset2D const& offset2D, int32_t z_ = {} )
+      : x( offset2D.x )
+      , y( offset2D.y )
+      , z( z_ )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
+      return *this;
+    }
+
+    Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Offset3D ) );
+      return *this;
+    }
+
+    Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+
+    operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset3D*>( this );
+    }
+
+    operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset3D*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Offset3D const& ) const = default;
+#else
+    bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    int32_t x = {};
+    int32_t y = {};
+    int32_t z = {};
+
+  };
+  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );
+
+  struct Extent3D
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
+    : width( width_ ), height( height_ ), depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    explicit Extent3D( Extent2D const& extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width )
+      , height( extent2D.height )
+      , depth( depth_ )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
+      return *this;
+    }
+
+    Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Extent3D ) );
+      return *this;
+    }
+
+    Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+
+    operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent3D*>( this );
+    }
+
+    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent3D*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Extent3D const& ) const = default;
+#else
+    bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+    }
+
+    bool operator!=( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+
+  };
+  static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!" );
 
   struct SparseImageMemoryBind
   {
-    SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(),
-                           Offset3D offset_ = Offset3D(),
-                           Extent3D extent_ = Extent3D(),
-                           DeviceMemory memory_ = DeviceMemory(),
-                           DeviceSize memoryOffset_ = 0,
-                           SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
-      : subresource( subresource_ )
-      , offset( offset_ )
-      , extent( extent_ )
-      , memory( memory_ )
-      , memoryOffset( memoryOffset_ )
-      , flags( flags_ )
-    {
-    }
 
-    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) );
-    }
 
-    SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : subresource( subresource_ ), offset( offset_ ), extent( extent_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
       return *this;
     }
-    SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ )
+
+    SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryBind ) );
+      return *this;
+    }
+
+    SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
     {
       subresource = subresource_;
       return *this;
     }
 
-    SparseImageMemoryBind& setOffset( Offset3D offset_ )
+    SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
     {
       offset = offset_;
       return *this;
     }
 
-    SparseImageMemoryBind& setExtent( Extent3D extent_ )
+    SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
     {
       extent = extent_;
       return *this;
     }
 
-    SparseImageMemoryBind& setMemory( DeviceMemory memory_ )
+    SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
     {
       memory = memory_;
       return *this;
     }
 
-    SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
+    SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
     {
       memoryOffset = memoryOffset_;
       return *this;
     }
 
-    SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
+    SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    operator VkSparseImageMemoryBind const&() const
+
+    operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSparseImageMemoryBind*>(this);
+      return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
     }
 
-    operator VkSparseImageMemoryBind &()
+    operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSparseImageMemoryBind*>(this);
+      return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
     }
 
-    bool operator==( SparseImageMemoryBind const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseImageMemoryBind const& ) const = default;
+#else
+    bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( subresource == rhs.subresource )
           && ( offset == rhs.offset )
@@ -26316,333 +21196,290 @@
           && ( flags == rhs.flags );
     }
 
-    bool operator!=( SparseImageMemoryBind const& rhs ) const
+    bool operator!=( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    ImageSubresource subresource;
-    Offset3D offset;
-    Extent3D extent;
-    DeviceMemory memory;
-    DeviceSize memoryOffset;
-    SparseMemoryBindFlags flags;
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
+
   };
   static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
-
-  struct SparseBufferMemoryBindInfo
-  {
-    SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(),
-                                uint32_t bindCount_ = 0,
-                                const SparseMemoryBind* pBinds_ = nullptr )
-      : buffer( buffer_ )
-      , bindCount( bindCount_ )
-      , pBinds( pBinds_ )
-    {
-    }
-
-    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) );
-    }
-
-    SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) );
-      return *this;
-    }
-    SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ )
-    {
-      bindCount = bindCount_;
-      return *this;
-    }
-
-    SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
-    {
-      pBinds = pBinds_;
-      return *this;
-    }
-
-    operator VkSparseBufferMemoryBindInfo const&() const
-    {
-      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>(this);
-    }
-
-    operator VkSparseBufferMemoryBindInfo &()
-    {
-      return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>(this);
-    }
-
-    bool operator==( SparseBufferMemoryBindInfo const& rhs ) const
-    {
-      return ( buffer == rhs.buffer )
-          && ( bindCount == rhs.bindCount )
-          && ( pBinds == rhs.pBinds );
-    }
-
-    bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Buffer buffer;
-    uint32_t bindCount;
-    const SparseMemoryBind* pBinds;
-  };
-  static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
-
-  struct SparseImageOpaqueMemoryBindInfo
-  {
-    SparseImageOpaqueMemoryBindInfo( Image image_ = Image(),
-                                     uint32_t bindCount_ = 0,
-                                     const SparseMemoryBind* pBinds_ = nullptr )
-      : image( image_ )
-      , bindCount( bindCount_ )
-      , pBinds( pBinds_ )
-    {
-    }
-
-    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
-    }
-
-    SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
-      return *this;
-    }
-    SparseImageOpaqueMemoryBindInfo& setImage( Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ )
-    {
-      bindCount = bindCount_;
-      return *this;
-    }
-
-    SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
-    {
-      pBinds = pBinds_;
-      return *this;
-    }
-
-    operator VkSparseImageOpaqueMemoryBindInfo const&() const
-    {
-      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>(this);
-    }
-
-    operator VkSparseImageOpaqueMemoryBindInfo &()
-    {
-      return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>(this);
-    }
-
-    bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const
-    {
-      return ( image == rhs.image )
-          && ( bindCount == rhs.bindCount )
-          && ( pBinds == rhs.pBinds );
-    }
-
-    bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Image image;
-    uint32_t bindCount;
-    const SparseMemoryBind* pBinds;
-  };
-  static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
 
   struct SparseImageMemoryBindInfo
   {
-    SparseImageMemoryBindInfo( Image image_ = Image(),
-                               uint32_t bindCount_ = 0,
-                               const SparseImageMemoryBind* pBinds_ = nullptr )
-      : image( image_ )
-      , bindCount( bindCount_ )
-      , pBinds( pBinds_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
+    : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs )
+    SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
       return *this;
     }
-    SparseImageMemoryBindInfo& setImage( Image image_ )
+
+    SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryBindInfo ) );
+      return *this;
+    }
+
+    SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
     {
       image = image_;
       return *this;
     }
 
-    SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ )
+    SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
     {
       bindCount = bindCount_;
       return *this;
     }
 
-    SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ )
+    SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
     {
       pBinds = pBinds_;
       return *this;
     }
 
-    operator VkSparseImageMemoryBindInfo const&() const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SparseImageMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>(this);
+      bindCount = static_cast<uint32_t>( binds_.size() );
+      pBinds = binds_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
     }
 
-    operator VkSparseImageMemoryBindInfo &()
+    operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSparseImageMemoryBindInfo*>(this);
+      return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
     }
 
-    bool operator==( SparseImageMemoryBindInfo const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseImageMemoryBindInfo const& ) const = default;
+#else
+    bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( image == rhs.image )
           && ( bindCount == rhs.bindCount )
           && ( pBinds == rhs.pBinds );
     }
 
-    bool operator!=( SparseImageMemoryBindInfo const& rhs ) const
+    bool operator!=( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    Image image;
-    uint32_t bindCount;
-    const SparseImageMemoryBind* pBinds;
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    uint32_t bindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {};
+
   };
   static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
 
   struct BindSparseInfo
   {
-    BindSparseInfo( uint32_t waitSemaphoreCount_ = 0,
-                    const Semaphore* pWaitSemaphores_ = nullptr,
-                    uint32_t bufferBindCount_ = 0,
-                    const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr,
-                    uint32_t imageOpaqueBindCount_ = 0,
-                    const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr,
-                    uint32_t imageBindCount_ = 0,
-                    const SparseImageMemoryBindInfo* pImageBinds_ = nullptr,
-                    uint32_t signalSemaphoreCount_ = 0,
-                    const Semaphore* pSignalSemaphores_ = nullptr )
-      : waitSemaphoreCount( waitSemaphoreCount_ )
-      , pWaitSemaphores( pWaitSemaphores_ )
-      , bufferBindCount( bufferBindCount_ )
-      , pBufferBinds( pBufferBinds_ )
-      , imageOpaqueBindCount( imageOpaqueBindCount_ )
-      , pImageOpaqueBinds( pImageOpaqueBinds_ )
-      , imageBindCount( imageBindCount_ )
-      , pImageBinds( pImageBinds_ )
-      , signalSemaphoreCount( signalSemaphoreCount_ )
-      , pSignalSemaphores( pSignalSemaphores_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
+    : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), bufferBindCount( bufferBindCount_ ), pBufferBinds( pBufferBinds_ ), imageOpaqueBindCount( imageOpaqueBindCount_ ), pImageOpaqueBinds( pImageOpaqueBinds_ ), imageBindCount( imageBindCount_ ), pImageBinds( pImageBinds_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    BindSparseInfo( VkBindSparseInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BindSparseInfo ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
+    : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) ), pBufferBinds( bufferBinds_.data() ), imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) ), pImageOpaqueBinds( imageOpaqueBinds_.data() ), imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) ), pImageBinds( imageBinds_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    BindSparseInfo& operator=( VkBindSparseInfo const & rhs )
+    BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( BindSparseInfo ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
       return *this;
     }
-    BindSparseInfo& setPNext( const void* pNext_ )
+
+    BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindSparseInfo ) );
+      return *this;
+    }
+
+    BindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
     {
       waitSemaphoreCount = waitSemaphoreCount_;
       return *this;
     }
 
-    BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+    BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
     {
       pWaitSemaphores = pWaitSemaphores_;
       return *this;
     }
 
-    BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindSparseInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+      pWaitSemaphores = waitSemaphores_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
     {
       bufferBindCount = bufferBindCount_;
       return *this;
     }
 
-    BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ )
+    BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
     {
       pBufferBinds = pBufferBinds_;
       return *this;
     }
 
-    BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindSparseInfo & setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
+      pBufferBinds = bufferBinds_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
     {
       imageOpaqueBindCount = imageOpaqueBindCount_;
       return *this;
     }
 
-    BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ )
+    BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
     {
       pImageOpaqueBinds = pImageOpaqueBinds_;
       return *this;
     }
 
-    BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindSparseInfo & setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
+      pImageOpaqueBinds = imageOpaqueBinds_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
     {
       imageBindCount = imageBindCount_;
       return *this;
     }
 
-    BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ )
+    BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT
     {
       pImageBinds = pImageBinds_;
       return *this;
     }
 
-    BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
+      pImageBinds = imageBinds_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
     {
       signalSemaphoreCount = signalSemaphoreCount_;
       return *this;
     }
 
-    BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+    BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
     {
       pSignalSemaphores = pSignalSemaphores_;
       return *this;
     }
 
-    operator VkBindSparseInfo const&() const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BindSparseInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkBindSparseInfo*>(this);
+      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
+      pSignalSemaphores = signalSemaphores_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindSparseInfo*>( this );
     }
 
-    operator VkBindSparseInfo &()
+    operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkBindSparseInfo*>(this);
+      return *reinterpret_cast<VkBindSparseInfo*>( this );
     }
 
-    bool operator==( BindSparseInfo const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindSparseInfo const& ) const = default;
+#else
+    bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -26658,124 +21495,1786 @@
           && ( pSignalSemaphores == rhs.pSignalSemaphores );
     }
 
-    bool operator!=( BindSparseInfo const& rhs ) const
+    bool operator!=( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eBindSparseInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    uint32_t waitSemaphoreCount;
-    const Semaphore* pWaitSemaphores;
-    uint32_t bufferBindCount;
-    const SparseBufferMemoryBindInfo* pBufferBinds;
-    uint32_t imageOpaqueBindCount;
-    const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
-    uint32_t imageBindCount;
-    const SparseImageMemoryBindInfo* pImageBinds;
-    uint32_t signalSemaphoreCount;
-    const Semaphore* pSignalSemaphores;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
+    const void* pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+    uint32_t bufferBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds = {};
+    uint32_t imageOpaqueBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds = {};
+    uint32_t imageBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {};
+    uint32_t signalSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
+
   };
   static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
 
-  enum class PipelineStageFlagBits
+  template <>
+  struct CppType<StructureType, StructureType::eBindSparseInfo>
   {
-    eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
-    eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
-    eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
-    eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
-    eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
-    eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
-    eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
-    eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
-    eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
-    eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
-    eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
-    eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
-    eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
-    eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
-    eHost = VK_PIPELINE_STAGE_HOST_BIT,
-    eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
-    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
-    eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
-    eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
-    eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
-    eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
-    eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
-    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
-    eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
-    eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV
+    using Type = BindSparseInfo;
   };
 
-  using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
-
-  VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
+  struct BindVertexBufferIndirectCommandNV
   {
-    return PipelineStageFlags( bit0 ) | bit1;
-  }
 
-  VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
-  {
-    return ~( PipelineStageFlags( bits ) );
-  }
 
-  template <> struct FlagTraits<PipelineStageFlagBits>
-  {
-    enum
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT
+    : bufferAddress( bufferAddress_ ), size( size_ ), stride( stride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV)
-    };
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BindVertexBufferIndirectCommandNV ) );
+      return *this;
+    }
+
+    BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferAddress = bufferAddress_;
+      return *this;
+    }
+
+    BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+
+    operator VkBindVertexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV*>( this );
+    }
+
+    operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindVertexBufferIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( bufferAddress == rhs.bufferAddress )
+          && ( size == rhs.size )
+          && ( stride == rhs.stride );
+    }
+
+    bool operator!=( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
+    uint32_t size = {};
+    uint32_t stride = {};
+
   };
+  static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
 
-  struct QueueFamilyCheckpointPropertiesNV
+  struct ImageSubresourceLayers
   {
-    operator VkQueueFamilyCheckpointPropertiesNV const&() const
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
+      return *this;
     }
 
-    operator VkQueueFamilyCheckpointPropertiesNV &()
+    ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSubresourceLayers ) );
+      return *this;
     }
 
-    bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const
+    ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+
+    operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
+    }
+
+    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceLayers*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSubresourceLayers const& ) const = default;
+#else
+    bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t mipLevel = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+
+  };
+  static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageBlit2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageBlit2KHR( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageBlit2KHR & operator=( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2KHR const *>( &rhs );
+      return *this;
+    }
+
+    ImageBlit2KHR & operator=( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageBlit2KHR ) );
+      return *this;
+    }
+
+    ImageBlit2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageBlit2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageBlit2KHR & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffsets = srcOffsets_;
+      return *this;
+    }
+
+    ImageBlit2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageBlit2KHR & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffsets = dstOffsets_;
+      return *this;
+    }
+
+
+    operator VkImageBlit2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageBlit2KHR*>( this );
+    }
+
+    operator VkImageBlit2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageBlit2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageBlit2KHR const& ) const = default;
+#else
+    bool operator==( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+          && ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffsets == rhs.srcOffsets )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffsets == rhs.dstOffsets );
     }
 
-    bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const
+    bool operator!=( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+
 
   public:
-    void* pNext = nullptr;
-    PipelineStageFlags checkpointExecutionStageMask;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
+
   };
-  static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ImageBlit2KHR ) == sizeof( VkImageBlit2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageBlit2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageBlit2KHR>
+  {
+    using Type = ImageBlit2KHR;
+  };
+
+  struct BlitImageInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest) VULKAN_HPP_NOEXCEPT
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ), filter( filter_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BlitImageInfo2KHR( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BlitImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest )
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() ), filter( filter_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BlitImageInfo2KHR & operator=( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    BlitImageInfo2KHR & operator=( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BlitImageInfo2KHR ) );
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BlitImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    BlitImageInfo2KHR & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      filter = filter_;
+      return *this;
+    }
+
+
+    operator VkBlitImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBlitImageInfo2KHR*>( this );
+    }
+
+    operator VkBlitImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBlitImageInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BlitImageInfo2KHR const& ) const = default;
+#else
+    bool operator==( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions )
+          && ( filter == rhs.filter );
+    }
+
+    bool operator!=( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions = {};
+    VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+
+  };
+  static_assert( sizeof( BlitImageInfo2KHR ) == sizeof( VkBlitImageInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BlitImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBlitImageInfo2KHR>
+  {
+    using Type = BlitImageInfo2KHR;
+  };
+
+  struct BufferCopy
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
+      return *this;
+    }
+
+    BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferCopy ) );
+      return *this;
+    }
+
+    BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCopy*>( this );
+    }
+
+    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCopy*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferCopy const& ) const = default;
+#else
+    bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcOffset == rhs.srcOffset )
+          && ( dstOffset == rhs.dstOffset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );
+
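// --- Editor's illustration (not part of the patch) ---------------------------
// BufferCopy is the plain region struct consumed by vkCmdCopyBuffer; with the
// constexpr constructor above a region can be written as a one-liner
// (`byteSize` is an assumed vk::DeviceSize):
//
//   vk::BufferCopy copyRegion( 0, 0, byteSize );   // srcOffset, dstOffset, size
// ------------------------------------------------------------------------------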
+  struct BufferCopy2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCopy2KHR( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCopy2KHR( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferCopy2KHR & operator=( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2KHR const *>( &rhs );
+      return *this;
+    }
+
+    BufferCopy2KHR & operator=( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferCopy2KHR ) );
+      return *this;
+    }
+
+    BufferCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    BufferCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    BufferCopy2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkBufferCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCopy2KHR*>( this );
+    }
+
+    operator VkBufferCopy2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCopy2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferCopy2KHR const& ) const = default;
+#else
+    bool operator==( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstOffset == rhs.dstOffset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+  static_assert( sizeof( BufferCopy2KHR ) == sizeof( VkBufferCopy2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferCopy2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCopy2KHR>
+  {
+    using Type = BufferCopy2KHR;
+  };
+
+  struct BufferCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
+    : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferCreateInfo ) );
+      return *this;
+    }
+
+    BufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCreateInfo*>( this );
+    }
+
+    operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferCreateInfo const& ) const = default;
+#else
+    bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( size == rhs.size )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+    }
+
+    bool operator!=( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t* pQueueFamilyIndices = {};
+
+  };
+  static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferCreateInfo>
+  {
+    using Type = BufferCreateInfo;
+  };
+
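// --- Editor's illustration (not part of the patch) ---------------------------
// A sketch of filling BufferCreateInfo by chaining the setters above; in
// enhanced mode the ArrayProxyNoTemporaries overload of setQueueFamilyIndices
// fills both queueFamilyIndexCount and pQueueFamilyIndices in one call. `vk`
// is assumed to alias VULKAN_HPP_NAMESPACE and `queueFamilies` is an assumed
// std::array<uint32_t, 2> lvalue.
//
//   vk::BufferCreateInfo bufferInfo = vk::BufferCreateInfo()
//     .setSize( 65536 )
//     .setUsage( vk::BufferUsageFlagBits::eTransferSrc )
//     .setSharingMode( vk::SharingMode::eConcurrent )
//     .setQueueFamilyIndices( queueFamilies );   // enhanced-mode ArrayProxy setter
// ------------------------------------------------------------------------------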
+  struct BufferDeviceAddressCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferDeviceAddressCreateInfoEXT ) );
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+
+    operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+    operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceAddress == rhs.deviceAddress );
+    }
+
+    bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+
+  };
+  static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
+  {
+    using Type = BufferDeviceAddressCreateInfoEXT;
+  };
+
+  struct BufferDeviceAddressInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
+      return *this;
+    }
+
+    BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferDeviceAddressInfo ) );
+      return *this;
+    }
+
+    BufferDeviceAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+
+    operator VkBufferDeviceAddressInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
+    }
+
+    operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferDeviceAddressInfo const& ) const = default;
+#else
+    bool operator==( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+  static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
+  {
+    using Type = BufferDeviceAddressInfo;
+  };
+  using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
+  using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
+
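// --- Editor's illustration (not part of the patch) ---------------------------
// BufferDeviceAddressInfo is the single-buffer query struct passed to
// vkGetBufferDeviceAddress and its KHR/EXT aliases. A hedged sketch of the
// call through the C++ wrapper, assuming a valid vk::Device `device`, a
// vk::Buffer `buffer`, and the default dispatch loader:
//
//   vk::DeviceAddress address =
//       device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );
// ------------------------------------------------------------------------------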
+  struct BufferImageCopy
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+    : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
+      return *this;
+    }
+
+    BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferImageCopy ) );
+      return *this;
+    }
+
+    BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferOffset = bufferOffset_;
+      return *this;
+    }
+
+    BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+
+    operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferImageCopy*>( this );
+    }
+
+    operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferImageCopy*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferImageCopy const& ) const = default;
+#else
+    bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( bufferOffset == rhs.bufferOffset )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
+    uint32_t bufferRowLength = {};
+    uint32_t bufferImageHeight = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+
+  };
+  static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
+
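// --- Editor's illustration (not part of the patch) ---------------------------
// BufferImageCopy carries no sType/pNext, so it is a plain region descriptor.
// A minimal sketch of a tightly packed full-image staging copy region,
// assuming `vk` aliases VULKAN_HPP_NAMESPACE and `extent` is the vk::Extent3D
// of the destination image:
//
//   vk::BufferImageCopy region{};
//   region.setBufferOffset( 0 )
//         .setBufferRowLength( 0 )          // 0 = rows tightly packed
//         .setBufferImageHeight( 0 )        // 0 = layers tightly packed
//         .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//         .setImageOffset( { 0, 0, 0 } )
//         .setImageExtent( extent );
// ------------------------------------------------------------------------------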
+  struct BufferImageCopy2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+    : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferImageCopy2KHR( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferImageCopy2KHR & operator=( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR const *>( &rhs );
+      return *this;
+    }
+
+    BufferImageCopy2KHR & operator=( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferImageCopy2KHR ) );
+      return *this;
+    }
+
+    BufferImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferImageCopy2KHR & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferOffset = bufferOffset_;
+      return *this;
+    }
+
+    BufferImageCopy2KHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    BufferImageCopy2KHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    BufferImageCopy2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    BufferImageCopy2KHR & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    BufferImageCopy2KHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+
+    operator VkBufferImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferImageCopy2KHR*>( this );
+    }
+
+    operator VkBufferImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferImageCopy2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferImageCopy2KHR const& ) const = default;
+#else
+    bool operator==( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferOffset == rhs.bufferOffset )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
+    uint32_t bufferRowLength = {};
+    uint32_t bufferImageHeight = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+
+  };
+  static_assert( sizeof( BufferImageCopy2KHR ) == sizeof( VkBufferImageCopy2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferImageCopy2KHR>
+  {
+    using Type = BufferImageCopy2KHR;
+  };
+
+  struct BufferMemoryBarrier
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+    BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferMemoryBarrier ) );
+      return *this;
+    }
+
+    BufferMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
+    }
+
+    operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferMemoryBarrier const& ) const = default;
+#else
+    bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+  static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
+  {
+    using Type = BufferMemoryBarrier;
+  };
+
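// --- Editor's illustration (not part of the patch) ---------------------------
// A hedged sketch of a release-side queue family ownership transfer built from
// the BufferMemoryBarrier setters above; `transferFamily`, `graphicsFamily`
// and `buffer` are assumed to be supplied by the caller, and the surrounding
// pipeline-barrier recording is omitted:
//
//   vk::BufferMemoryBarrier barrier{};
//   barrier.setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
//          .setDstAccessMask( {} )                 // no destination access on the releasing queue
//          .setSrcQueueFamilyIndex( transferFamily )
//          .setDstQueueFamilyIndex( graphicsFamily )
//          .setBuffer( buffer )
//          .setOffset( 0 )
//          .setSize( VK_WHOLE_SIZE );
// ------------------------------------------------------------------------------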
+  struct BufferMemoryRequirementsInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferMemoryRequirementsInfo2 ) );
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+
+    operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default;
+#else
+    bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+  static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
+  {
+    using Type = BufferMemoryRequirementsInfo2;
+  };
+  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+
+  struct BufferOpaqueCaptureAddressCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT
+    : opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferOpaqueCaptureAddressCreateInfo ) );
+      return *this;
+    }
+
+    BufferOpaqueCaptureAddressCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+
+
+    operator VkBufferOpaqueCaptureAddressCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
+    }
+
+    operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const& ) const = default;
+#else
+    bool operator==( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+    }
+
+    bool operator!=( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+    const void* pNext = {};
+    uint64_t opaqueCaptureAddress = {};
+
+  };
+  static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferOpaqueCaptureAddressCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
+  {
+    using Type = BufferOpaqueCaptureAddressCreateInfo;
+  };
+  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
+
+  struct BufferViewCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), buffer( buffer_ ), format( format_ ), offset( offset_ ), range( range_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( BufferViewCreateInfo ) );
+      return *this;
+    }
+
+    BufferViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+
+    operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
+    }
+
+    operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferViewCreateInfo const& ) const = default;
+#else
+    bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+
+  };
+  static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
+  {
+    using Type = BufferViewCreateInfo;
+  };
+
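// --- Editor's illustration (not part of the patch) ---------------------------
// A sketch of creating a texel-buffer view from the constructor above,
// assuming enhanced mode with exceptions, an existing vk::Device `device` and
// vk::Buffer `texelBuffer`:
//
//   vk::BufferViewCreateInfo viewInfo( {}, texelBuffer, vk::Format::eR32Sfloat, 0, VK_WHOLE_SIZE );
//   vk::BufferView view = device.createBufferView( viewInfo );
// ------------------------------------------------------------------------------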
+  struct CalibratedTimestampInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice) VULKAN_HPP_NOEXCEPT
+    : timeDomain( timeDomain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CalibratedTimestampInfoEXT ) );
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeDomain = timeDomain_;
+      return *this;
+    }
+
+
+    operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+    operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CalibratedTimestampInfoEXT const& ) const = default;
+#else
+    bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( timeDomain == rhs.timeDomain );
+    }
+
+    bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
+
+  };
+  static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
+  {
+    using Type = CalibratedTimestampInfoEXT;
+  };
 
   struct CheckpointDataNV
   {
-    operator VkCheckpointDataNV const&() const
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void* pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT
+    : stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkCheckpointDataNV*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
+      return *this;
     }
 
-    operator VkCheckpointDataNV &()
+    CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkCheckpointDataNV*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CheckpointDataNV ) );
+      return *this;
     }
 
-    bool operator==( CheckpointDataNV const& rhs ) const
+
+    operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCheckpointDataNV*>( this );
+    }
+
+    operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCheckpointDataNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CheckpointDataNV const& ) const = default;
+#else
+    bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -26783,96 +23282,1440 @@
           && ( pCheckpointMarker == rhs.pCheckpointMarker );
     }
 
-    bool operator!=( CheckpointDataNV const& rhs ) const
+    bool operator!=( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eCheckpointDataNV;
+
 
   public:
-    void* pNext = nullptr;
-    PipelineStageFlagBits stage;
-    void* pCheckpointMarker;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
+    void* pCheckpointMarker = {};
+
   };
   static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
 
-  enum class CommandPoolCreateFlagBits
+  template <>
+  struct CppType<StructureType, StructureType::eCheckpointDataNV>
   {
-    eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
-    eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
-    eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
+    using Type = CheckpointDataNV;
   };
 
-  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
-
-  VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
+  union ClearColorValue
   {
-    return CommandPoolCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
-  {
-    return ~( CommandPoolCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<CommandPoolCreateFlagBits>
-  {
-    enum
+    ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const& rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected)
-    };
-  };
-
-  struct CommandPoolCreateInfo
-  {
-    CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(),
-                           uint32_t queueFamilyIndex_ = 0 )
-      : flags( flags_ )
-      , queueFamilyIndex( queueFamilyIndex_ )
-    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
     }
 
-    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) );
-    }
+    ClearColorValue( const std::array<float,4>& float32_ = {} )
+      : float32( float32_ )
+    {}
 
-    CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
+    ClearColorValue( const std::array<int32_t,4>& int32_ )
+      : int32( int32_ )
+    {}
+
+    ClearColorValue( const std::array<uint32_t,4>& uint32_ )
+      : uint32( uint32_ )
+    {}
+
+    ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) );
+      float32 = float32_;
       return *this;
     }
-    CommandPoolCreateInfo& setPNext( const void* pNext_ )
+
+    ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int32 = int32_;
+      return *this;
+    }
+
+    ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint32 = uint32_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
+      return *this;
+    }
+
+    operator VkClearColorValue const&() const
+    {
+      return *reinterpret_cast<const VkClearColorValue*>(this);
+    }
+
+    operator VkClearColorValue &()
+    {
+      return *reinterpret_cast<VkClearColorValue*>(this);
+    }
+
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
+  };
+
+  struct ClearDepthStencilValue
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT
+    : depth( depth_ ), stencil( stencil_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
+      return *this;
+    }
+
+    ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ClearDepthStencilValue ) );
+      return *this;
+    }
+
+    ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencil = stencil_;
+      return *this;
+    }
+
+
+    operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
+    }
+
+    operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearDepthStencilValue*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ClearDepthStencilValue const& ) const = default;
+#else
+    bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( depth == rhs.depth )
+          && ( stencil == rhs.stencil );
+    }
+
+    bool operator!=( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    float depth = {};
+    uint32_t stencil = {};
+
+  };
+  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
+
+  union ClearValue
+  {
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+    }
+
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
+      : color( color_ )
+    {}
+
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
+      : depthStencil( depthStencil_ )
+    {}
+
+    ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+    ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthStencil = depthStencil_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+      return *this;
+    }
+
+    operator VkClearValue const&() const
+    {
+      return *reinterpret_cast<const VkClearValue*>(this);
+    }
+
+    operator VkClearValue &()
+    {
+      return *reinterpret_cast<VkClearValue*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::ClearColorValue color;
+    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
+#else
+    VkClearColorValue color;
+    VkClearDepthStencilValue depthStencil;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
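// --- Editor's illustration (not part of the patch) ---------------------------
// ClearValue is a union over ClearColorValue and ClearDepthStencilValue, and a
// render pass typically supplies one entry per attachment. A minimal sketch
// using the constructors and setters above (assuming `vk` aliases
// VULKAN_HPP_NAMESPACE):
//
//   std::array<vk::ClearValue, 2> clearValues{};
//   clearValues[0].setColor( vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) );
//   clearValues[1].setDepthStencil( vk::ClearDepthStencilValue( 1.0f, 0 ) );
// ------------------------------------------------------------------------------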
+  struct ClearAttachment
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), colorAttachment( colorAttachment_ ), clearValue( clearValue_ )
+    {}
+
+    ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
+      return *this;
+    }
+
+    ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ClearAttachment ) );
+      return *this;
+    }
+
+    ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachment = colorAttachment_;
+      return *this;
+    }
+
+    ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+
+
+    operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearAttachment*>( this );
+    }
+
+    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearAttachment*>( this );
+    }
+
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t colorAttachment = {};
+    VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
+
+  };
+  static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );
+
+  struct ClearRect
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : rect( rect_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
+      return *this;
+    }
+
+    ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ClearRect ) );
+      return *this;
+    }
+
+    ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rect = rect_;
+      return *this;
+    }
+
+    ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+
+    operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearRect*>( this );
+    }
+
+    operator VkClearRect &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearRect*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ClearRect const& ) const = default;
+#else
+    bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( rect == rhs.rect )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Rect2D rect = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+
+  };
+  static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearRect>::value, "struct wrapper is not a standard layout!" );
+
+  struct CoarseSampleLocationNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT
+    : pixelX( pixelX_ ), pixelY( pixelY_ ), sample( sample_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
+      return *this;
+    }
+
+    CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CoarseSampleLocationNV ) );
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelX = pixelX_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelY = pixelY_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sample = sample_;
+      return *this;
+    }
+
+
+    operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
+    }
+
+    operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CoarseSampleLocationNV const& ) const = default;
+#else
+    bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( pixelX == rhs.pixelX )
+          && ( pixelY == rhs.pixelY )
+          && ( sample == rhs.sample );
+    }
+
+    bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t pixelX = {};
+    uint32_t pixelY = {};
+    uint32_t sample = {};
+
+  };
+  static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct CoarseSampleOrderCustomNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( sampleLocationCount_ ), pSampleLocations( pSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ )
+    : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CoarseSampleOrderCustomNV ) );
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRate = shadingRate_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleCount = sampleCount_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationCount = sampleLocationCount_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleLocations = pSampleLocations_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CoarseSampleOrderCustomNV & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
+      pSampleLocations = sampleLocations_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+    operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CoarseSampleOrderCustomNV const& ) const = default;
+#else
+    bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shadingRate == rhs.shadingRate )
+          && ( sampleCount == rhs.sampleCount )
+          && ( sampleLocationCount == rhs.sampleLocationCount )
+          && ( pSampleLocations == rhs.pSampleLocations );
+    }
+
+    bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
+    uint32_t sampleCount = {};
+    uint32_t sampleLocationCount = {};
+    const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {};
+
+  };
+  static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+
+  class CommandPool
+  {
+  public:
+    using CType = VkCommandPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
+      : m_commandPool( commandPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = commandPool;
+      return *this;
+    }
+#endif
+
+    CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandPool const& ) const = default;
+#else
+    bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == rhs.m_commandPool;
+    }
+
+    bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != rhs.m_commandPool;
+    }
+
+    bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool < rhs.m_commandPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandPool m_commandPool;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandPool>
+  {
+    using type = VULKAN_HPP_NAMESPACE::CommandPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandPool;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandPool;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct CommandBufferAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : commandPool( commandPool_ ), level( level_ ), commandBufferCount( commandBufferCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferAllocateInfo ) );
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ )
+    CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandPool = commandPool_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
+    {
+      level = level_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+
+    operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
+    }
+
+    operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferAllocateInfo const& ) const = default;
+#else
+    bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( commandPool == rhs.commandPool )
+          && ( level == rhs.level )
+          && ( commandBufferCount == rhs.commandBufferCount );
+    }
+
+    bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
+    VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
+    uint32_t commandBufferCount = {};
+
+  };
+  static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
+  {
+    using Type = CommandBufferAllocateInfo;
+  };
+
+  class RenderPass
+  {
+  public:
+    using CType = VkRenderPass;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
+
+  public:
+    VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
+      : m_renderPass( renderPass )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = renderPass;
+      return *this;
+    }
+#endif
+
+    RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPass const& ) const = default;
+#else
+    bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == rhs.m_renderPass;
+    }
+
+    bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != rhs.m_renderPass;
+    }
+
+    bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass < rhs.m_renderPass;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkRenderPass m_renderPass;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eRenderPass>
+  {
+    using type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::RenderPass>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Framebuffer
+  {
+  public:
+    using CType = VkFramebuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
+      : m_framebuffer( framebuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = framebuffer;
+      return *this;
+    }
+#endif
+
+    Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Framebuffer const& ) const = default;
+#else
+    bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == rhs.m_framebuffer;
+    }
+
+    bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != rhs.m_framebuffer;
+    }
+
+    bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer < rhs.m_framebuffer;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFramebuffer m_framebuffer;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eFramebuffer>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Framebuffer;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Framebuffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct CommandBufferInheritanceInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
+    : renderPass( renderPass_ ), subpass( subpass_ ), framebuffer( framebuffer_ ), occlusionQueryEnable( occlusionQueryEnable_ ), queryFlags( queryFlags_ ), pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferInheritanceInfo ) );
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryEnable = occlusionQueryEnable_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryFlags = queryFlags_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+
+    operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
+    }
+
+    operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferInheritanceInfo const& ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
+          && ( queryFlags == rhs.queryFlags )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
+    VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+
+  };
+  static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
+  {
+    using Type = CommandBufferInheritanceInfo;
+  };
+
+  struct CommandBufferBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pInheritanceInfo( pInheritanceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferBeginInfo ) );
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
+    CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInheritanceInfo = pInheritanceInfo_;
+      return *this;
+    }
+
+
+    operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
+    }
+
+    operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferBeginInfo const& ) const = default;
+#else
+    bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pInheritanceInfo == rhs.pInheritanceInfo );
+    }
+
+    bool operator!=( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
+    const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {};
+
+  };
+  static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
+  {
+    using Type = CommandBufferBeginInfo;
+  };
+
+  struct CommandBufferInheritanceConditionalRenderingInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}) VULKAN_HPP_NOEXCEPT
+    : conditionalRenderingEnable( conditionalRenderingEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) );
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRenderingEnable = conditionalRenderingEnable_;
+      return *this;
+    }
+
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const& ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
+    }
+
+    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
+
+  };
+  static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
+  {
+    using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
+  };
+
+  struct CommandBufferInheritanceRenderPassTransformInfoQCOM
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}) VULKAN_HPP_NOEXCEPT
+    : transform( transform_ ), renderArea( renderArea_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) );
+      return *this;
+    }
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+
+    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
+    }
+
+    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const& ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transform == rhs.transform )
+          && ( renderArea == rhs.renderArea );
+    }
+
+    bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+
+  };
+  static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
+  {
+    using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
+  };
+
+  struct CommandPoolCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CommandPoolCreateInfo ) );
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
     {
       queueFamilyIndex = queueFamilyIndex_;
       return *this;
     }
 
-    operator VkCommandPoolCreateInfo const&() const
+
+    operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkCommandPoolCreateInfo*>(this);
+      return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
     }
 
-    operator VkCommandPoolCreateInfo &()
+    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkCommandPoolCreateInfo*>(this);
+      return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
     }
 
-    bool operator==( CommandPoolCreateInfo const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandPoolCreateInfo const& ) const = default;
+#else
+    bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -26880,404 +24723,12523 @@
           && ( queueFamilyIndex == rhs.queueFamilyIndex );
     }
 
-    bool operator!=( CommandPoolCreateInfo const& rhs ) const
+    bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+
+  };
+  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
+  {
+    using Type = CommandPoolCreateInfo;
+  };
+
+  class ShaderModule
+  {
+  public:
+    using CType = VkShaderModule;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule( shaderModule )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = shaderModule;
+      return *this;
+    }
+#endif
+
+    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModule const& ) const = default;
+#else
+    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == rhs.m_shaderModule;
+    }
+
+    bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != rhs.m_shaderModule;
+    }
+
+    bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule < rhs.m_shaderModule;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == VK_NULL_HANDLE;
     }
 
   private:
-    StructureType sType = StructureType::eCommandPoolCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    CommandPoolCreateFlags flags;
-    uint32_t queueFamilyIndex;
+    VkShaderModule m_shaderModule;
   };
-  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
 
-  enum class CommandPoolResetFlagBits
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eShaderModule>
   {
-    eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+    using type = VULKAN_HPP_NAMESPACE::ShaderModule;
   };
 
-  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
-
-  VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
   {
-    return CommandPoolResetFlags( bit0 ) | bit1;
-  }
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
 
-  VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
-  {
-    return ~( CommandPoolResetFlags( bits ) );
-  }
 
-  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
   {
-    enum
+    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct SpecializationMapEntry
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
+    : constantID( constantID_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
-    };
-  };
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-  enum class CommandBufferResetFlagBits
-  {
-    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
-  };
-
-  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
-
-  VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
-  {
-    return CommandBufferResetFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
-  {
-    return ~( CommandBufferResetFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<CommandBufferResetFlagBits>
-  {
-    enum
+    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
-    };
-  };
-
-  enum class SampleCountFlagBits
-  {
-    e1 = VK_SAMPLE_COUNT_1_BIT,
-    e2 = VK_SAMPLE_COUNT_2_BIT,
-    e4 = VK_SAMPLE_COUNT_4_BIT,
-    e8 = VK_SAMPLE_COUNT_8_BIT,
-    e16 = VK_SAMPLE_COUNT_16_BIT,
-    e32 = VK_SAMPLE_COUNT_32_BIT,
-    e64 = VK_SAMPLE_COUNT_64_BIT
-  };
-
-  using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
-
-  VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
-  {
-    return SampleCountFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
-  {
-    return ~( SampleCountFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<SampleCountFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
-    };
-  };
-
-  struct ImageFormatProperties
-  {
-    operator VkImageFormatProperties const&() const
-    {
-      return *reinterpret_cast<const VkImageFormatProperties*>(this);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
+      return *this;
     }
 
-    operator VkImageFormatProperties &()
+    SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageFormatProperties*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SpecializationMapEntry ) );
+      return *this;
     }
 
-    bool operator==( ImageFormatProperties const& rhs ) const
+    SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
     {
-      return ( maxExtent == rhs.maxExtent )
-          && ( maxMipLevels == rhs.maxMipLevels )
-          && ( maxArrayLayers == rhs.maxArrayLayers )
-          && ( sampleCounts == rhs.sampleCounts )
-          && ( maxResourceSize == rhs.maxResourceSize );
+      constantID = constantID_;
+      return *this;
     }
 
-    bool operator!=( ImageFormatProperties const& rhs ) const
+    SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+    }
+
+    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SpecializationMapEntry const& ) const = default;
+#else
+    bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( constantID == rhs.constantID )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    Extent3D maxExtent;
-    uint32_t maxMipLevels;
-    uint32_t maxArrayLayers;
-    SampleCountFlags sampleCounts;
-    DeviceSize maxResourceSize;
+
+
+  public:
+    uint32_t constantID = {};
+    uint32_t offset = {};
+    size_t size = {};
+
   };
-  static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
 
-  struct ImageCreateInfo
+  struct SpecializationInfo
   {
-    ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(),
-                     ImageType imageType_ = ImageType::e1D,
-                     Format format_ = Format::eUndefined,
-                     Extent3D extent_ = Extent3D(),
-                     uint32_t mipLevels_ = 0,
-                     uint32_t arrayLayers_ = 0,
-                     SampleCountFlagBits samples_ = SampleCountFlagBits::e1,
-                     ImageTiling tiling_ = ImageTiling::eOptimal,
-                     ImageUsageFlags usage_ = ImageUsageFlags(),
-                     SharingMode sharingMode_ = SharingMode::eExclusive,
-                     uint32_t queueFamilyIndexCount_ = 0,
-                     const uint32_t* pQueueFamilyIndices_ = nullptr,
-                     ImageLayout initialLayout_ = ImageLayout::eUndefined )
-      : flags( flags_ )
-      , imageType( imageType_ )
-      , format( format_ )
-      , extent( extent_ )
-      , mipLevels( mipLevels_ )
-      , arrayLayers( arrayLayers_ )
-      , samples( samples_ )
-      , tiling( tiling_ )
-      , usage( usage_ )
-      , sharingMode( sharingMode_ )
-      , queueFamilyIndexCount( queueFamilyIndexCount_ )
-      , pQueueFamilyIndices( pQueueFamilyIndices_ )
-      , initialLayout( initialLayout_ )
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+    : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    ImageCreateInfo( VkImageCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImageCreateInfo ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
+    : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    ImageCreateInfo& operator=( VkImageCreateInfo const & rhs )
+    SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( ImageCreateInfo ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
       return *this;
     }
-    ImageCreateInfo& setPNext( const void* pNext_ )
+
+    SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SpecializationInfo ) );
+      return *this;
+    }
+
+    SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mapEntryCount = mapEntryCount_;
+      return *this;
+    }
+
+    SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMapEntries = pMapEntries_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
+      pMapEntries = mapEntries_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = data_.size() * sizeof(T);
+      pData = data_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationInfo*>( this );
+    }
+
+    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SpecializationInfo const& ) const = default;
+#else
+    bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( mapEntryCount == rhs.mapEntryCount )
+          && ( pMapEntries == rhs.pMapEntries )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t mapEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
+    size_t dataSize = {};
+    const void* pData = {};
+
+  };
+  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+
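+  // Usage sketch (editorial note, not generated code; assumes the default namespace alias vk):
+  // the non-enhanced setters above pair each pointer with an explicitly supplied count/size.
+  // `entry` describes one specialization constant; `workgroupSize` is hypothetical caller data.
+  //
+  //   vk::SpecializationMapEntry entry{};
+  //   entry.constantID = 0; entry.offset = 0; entry.size = sizeof( uint32_t );
+  //   uint32_t workgroupSize = 64;
+  //   vk::SpecializationInfo specInfo;
+  //   specInfo.setMapEntryCount( 1 ).setPMapEntries( &entry )
+  //           .setDataSize( sizeof( workgroupSize ) ).setPData( &workgroupSize );
+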
+  struct PipelineShaderStageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineShaderStageCreateInfo ) );
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    ImageCreateInfo& setFlags( ImageCreateFlags flags_ )
+    PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    ImageCreateInfo& setImageType( ImageType imageType_ )
+    PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
     {
-      imageType = imageType_;
+      stage = stage_;
       return *this;
     }
 
-    ImageCreateInfo& setFormat( Format format_ )
+    PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
     {
-      format = format_;
+      module = module_;
       return *this;
     }
 
-    ImageCreateInfo& setExtent( Extent3D extent_ )
+    PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pName = pName_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSpecializationInfo = pSpecializationInfo_;
+      return *this;
+    }
+
+
+    operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( module == rhs.module )
+          && ( pName == rhs.pName )
+          && ( pSpecializationInfo == rhs.pSpecializationInfo );
+    }
+
+    bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+    VULKAN_HPP_NAMESPACE::ShaderModule module = {};
+    const char* pName = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
+
+  };
+  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
+  {
+    using Type = PipelineShaderStageCreateInfo;
+  };
+
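+  // Usage sketch: the fluent setters above all return *this, so a shader-stage description
+  // chains into a single expression. `computeModule` and `specInfo` are hypothetical caller
+  // objects; "main" is the SPIR-V entry-point name.
+  //
+  //   vk::PipelineShaderStageCreateInfo stageInfo;
+  //   stageInfo.setStage( vk::ShaderStageFlagBits::eCompute )
+  //            .setModule( computeModule )
+  //            .setPName( "main" )
+  //            .setPSpecializationInfo( &specInfo );
+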
+  class PipelineLayout
+  {
+  public:
+    using CType = VkPipelineLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT
+      : m_pipelineLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineLayout( pipelineLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineLayout = pipelineLayout;
+      return *this;
+    }
+#endif
+
+    PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineLayout = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineLayout const& ) const = default;
+#else
+    bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout == rhs.m_pipelineLayout;
+    }
+
+    bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout != rhs.m_pipelineLayout;
+    }
+
+    bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout < rhs.m_pipelineLayout;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineLayout m_pipelineLayout;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipelineLayout>
+  {
+    using type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineLayout>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Pipeline
+  {
+  public:
+    using CType = VkPipeline;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
+      : m_pipeline(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipeline(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
+      : m_pipeline( pipeline )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipeline = pipeline;
+      return *this;
+    }
+#endif
+
+    Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipeline = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Pipeline const& ) const = default;
+#else
+    bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline == rhs.m_pipeline;
+    }
+
+    bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline != rhs.m_pipeline;
+    }
+
+    bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline < rhs.m_pipeline;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipeline m_pipeline;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipeline>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Pipeline;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
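+  // Editorial note: PipelineLayout and Pipeline follow the same thin handle-wrapper pattern as
+  // every other handle type in this header: a single Vk* member, value comparison, an explicit
+  // conversion back to the C handle, and boolean tests against VK_NULL_HANDLE. A minimal sketch
+  // (`rawPipeline` is a hypothetical VkPipeline obtained from the C API):
+  //
+  //   vk::Pipeline pipeline;                        // default-constructed == VK_NULL_HANDLE
+  //   pipeline = vk::Pipeline( rawPipeline );
+  //   if ( pipeline )                               // true once a non-null handle is held
+  //   {
+  //     VkPipeline c = static_cast<VkPipeline>( pipeline );
+  //   }
+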
+  struct ComputePipelineCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ComputePipelineCreateInfo ) );
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+
+    operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+    }
+
+    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
+#else
+    bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
+  };
+  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
+  {
+    using Type = ComputePipelineCreateInfo;
+  };
+
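+  // Usage sketch: a compute pipeline takes exactly one shader stage plus a layout. Because the
+  // wrapper is layout-compatible with VkComputePipelineCreateInfo (see the static_asserts above),
+  // the implicit conversion operator hands it straight to the C entry point. `stageInfo`,
+  // `pipelineLayout` and `device` are hypothetical caller objects.
+  //
+  //   vk::ComputePipelineCreateInfo createInfo;
+  //   createInfo.setStage( stageInfo ).setLayout( pipelineLayout );
+  //   const VkComputePipelineCreateInfo & cCreateInfo = createInfo;
+  //   VkPipeline rawPipeline = VK_NULL_HANDLE;
+  //   vkCreateComputePipelines( device, VK_NULL_HANDLE, 1, &cCreateInfo, nullptr, &rawPipeline );
+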
+  struct ConditionalRenderingBeginInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ConditionalRenderingBeginInfoEXT ) );
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
+#else
+    bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
+
+  };
+  static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
+  {
+    using Type = ConditionalRenderingBeginInfoEXT;
+  };
+
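+  // Usage sketch (VK_EXT_conditional_rendering, entry points loaded via vkGetDeviceProcAddr):
+  // the struct names the buffer and offset holding the 32-bit predicate that gates whatever is
+  // recorded between the begin/end pair. `commandBuffer` and `predicateBuffer` are hypothetical
+  // caller handles.
+  //
+  //   vk::ConditionalRenderingBeginInfoEXT beginInfo;
+  //   beginInfo.setBuffer( predicateBuffer ).setOffset( 0 );
+  //   const VkConditionalRenderingBeginInfoEXT & cBeginInfo = beginInfo;
+  //   vkCmdBeginConditionalRenderingEXT( commandBuffer, &cBeginInfo );
+  //   // ... conditionally executed commands ...
+  //   vkCmdEndConditionalRenderingEXT( commandBuffer );
+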
+  struct ConformanceVersion
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
+    : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
+      return *this;
+    }
+
+    ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ConformanceVersion ) );
+      return *this;
+    }
+
+    ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+    {
+      major = major_;
+      return *this;
+    }
+
+    ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minor = minor_;
+      return *this;
+    }
+
+    ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subminor = subminor_;
+      return *this;
+    }
+
+    ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patch = patch_;
+      return *this;
+    }
+
+
+    operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConformanceVersion*>( this );
+    }
+
+    operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConformanceVersion*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ConformanceVersion const& ) const = default;
+#else
+    bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( major == rhs.major )
+          && ( minor == rhs.minor )
+          && ( subminor == rhs.subminor )
+          && ( patch == rhs.patch );
+    }
+
+    bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint8_t major = {};
+    uint8_t minor = {};
+    uint8_t subminor = {};
+    uint8_t patch = {};
+
+  };
+  static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+  using ConformanceVersionKHR = ConformanceVersion;
+
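+  // Editorial note: ConformanceVersion is a plain four-byte value type (reported through
+  // VkPhysicalDeviceDriverProperties / VkPhysicalDeviceVulkan12Properties), so it supports
+  // direct field access and memberwise comparison. A minimal sketch:
+  //
+  //   vk::ConformanceVersion v;
+  //   v.setMajor( 1 ).setMinor( 2 ).setSubminor( 6 ).setPatch( 0 );
+  //   bool atLeast_1_2 = ( v.major > 1 ) || ( ( v.major == 1 ) && ( v.minor >= 2 ) );
+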
+  struct CooperativeMatrixPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
+    : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CooperativeMatrixPropertiesNV ) );
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      MSize = MSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      NSize = NSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      KSize = KSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      AType = AType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      BType = BType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      CType = CType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      DType = DType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scope = scope_;
+      return *this;
+    }
+
+
+    operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
+#else
+    bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( MSize == rhs.MSize )
+          && ( NSize == rhs.NSize )
+          && ( KSize == rhs.KSize )
+          && ( AType == rhs.AType )
+          && ( BType == rhs.BType )
+          && ( CType == rhs.CType )
+          && ( DType == rhs.DType )
+          && ( scope == rhs.scope );
+    }
+
+    bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
+    void* pNext = {};
+    uint32_t MSize = {};
+    uint32_t NSize = {};
+    uint32_t KSize = {};
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
+
+  };
+  static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
+  {
+    using Type = CooperativeMatrixPropertiesNV;
+  };
+
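+  // Usage sketch (VK_NV_cooperative_matrix): the struct is filled by the physical-device query,
+  // so callers use the usual count-then-fill enumeration. `physicalDevice` is a hypothetical
+  // handle and the extension entry point must have been loaded.
+  //
+  //   uint32_t count = 0;
+  //   vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, &count, nullptr );
+  //   std::vector<VkCooperativeMatrixPropertiesNV> props( count, { VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV } );
+  //   vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, &count, props.data() );
+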
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct CopyAccelerationStructureInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+    : src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyAccelerationStructureInfoKHR ) );
+      return *this;
+    }
+
+    CopyAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+
+    operator VkCopyAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
+    }
+
+    operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyAccelerationStructureInfoKHR const& ) const = default;
+#else
+    bool operator==( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( src == rhs.src )
+          && ( dst == rhs.dst )
+          && ( mode == rhs.mode );
+    }
+
+    bool operator!=( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+  };
+  static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
+  {
+    using Type = CopyAccelerationStructureInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct CopyAccelerationStructureToMemoryInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    CopyAccelerationStructureToMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+    : src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyAccelerationStructureToMemoryInfoKHR ) );
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+
+    operator VkCopyAccelerationStructureToMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
+    }
+
+    operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+  };
+  static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyAccelerationStructureToMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
+  {
+    using Type = CopyAccelerationStructureToMemoryInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct CopyBufferInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyBufferInfo2KHR( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ )
+    : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyBufferInfo2KHR & operator=( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    CopyBufferInfo2KHR & operator=( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyBufferInfo2KHR ) );
+      return *this;
+    }
+
+    CopyBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyBufferInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBuffer = srcBuffer_;
+      return *this;
+    }
+
+    CopyBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBuffer = dstBuffer_;
+      return *this;
+    }
+
+    CopyBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    CopyBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkCopyBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyBufferInfo2KHR*>( this );
+    }
+
+    operator VkCopyBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyBufferInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyBufferInfo2KHR const& ) const = default;
+#else
+    bool operator==( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcBuffer == rhs.srcBuffer )
+          && ( dstBuffer == rhs.dstBuffer )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+    VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions = {};
+
+  };
+  static_assert( sizeof( CopyBufferInfo2KHR ) == sizeof( VkCopyBufferInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyBufferInfo2KHR>
+  {
+    using Type = CopyBufferInfo2KHR;
+  };
+
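+  // Usage sketch (VK_KHR_copy_commands2): `src`, `dst` and `commandBuffer` are hypothetical
+  // handles and the extension entry point must have been loaded. The pointer/count pair can
+  // also be filled from an ArrayProxy via setRegions when enhanced mode is enabled.
+  //
+  //   vk::BufferCopy2KHR region;
+  //   region.setSrcOffset( 0 ).setDstOffset( 0 ).setSize( 256 );
+  //   vk::CopyBufferInfo2KHR copyInfo( src, dst, 1, &region );
+  //   const VkCopyBufferInfo2KHR & cCopyInfo = copyInfo;
+  //   vkCmdCopyBuffer2KHR( commandBuffer, &cCopyInfo );
+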
+  struct CopyBufferToImageInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyBufferToImageInfo2KHR( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyBufferToImageInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
+    : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyBufferToImageInfo2KHR & operator=( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    CopyBufferToImageInfo2KHR & operator=( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyBufferToImageInfo2KHR ) );
+      return *this;
+    }
+
+    CopyBufferToImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyBufferToImageInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBuffer = srcBuffer_;
+      return *this;
+    }
+
+    CopyBufferToImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    CopyBufferToImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    CopyBufferToImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    CopyBufferToImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyBufferToImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkCopyBufferToImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyBufferToImageInfo2KHR*>( this );
+    }
+
+    operator VkCopyBufferToImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyBufferToImageInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyBufferToImageInfo2KHR const& ) const = default;
+#else
+    bool operator==( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcBuffer == rhs.srcBuffer )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
+
+  };
+  static_assert( sizeof( CopyBufferToImageInfo2KHR ) == sizeof( VkCopyBufferToImageInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyBufferToImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2KHR>
+  {
+    using Type = CopyBufferToImageInfo2KHR;
+  };
+
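+  // Usage sketch (VK_KHR_copy_commands2): mirrors CopyBufferInfo2KHR, with the destination being
+  // an image plus the layout it is expected to be in at execution time. `stagingBuffer`, `image`,
+  // `region` (a vk::BufferImageCopy2KHR) and `commandBuffer` are hypothetical caller objects.
+  //
+  //   vk::CopyBufferToImageInfo2KHR copyInfo( stagingBuffer, image,
+  //                                           vk::ImageLayout::eTransferDstOptimal, 1, &region );
+  //   const VkCopyBufferToImageInfo2KHR & cCopyInfo = copyInfo;
+  //   vkCmdCopyBufferToImage2KHR( commandBuffer, &cCopyInfo );
+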
+  class DescriptorSet
+  {
+  public:
+    using CType = VkDescriptorSet;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet( descriptorSet )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = descriptorSet;
+      return *this;
+    }
+#endif
+
+    DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSet const& ) const = default;
+#else
+    bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == rhs.m_descriptorSet;
+    }
+
+    bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != rhs.m_descriptorSet;
+    }
+
+    bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet < rhs.m_descriptorSet;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSet m_descriptorSet;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSet>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct CopyDescriptorSet
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
+      return *this;
+    }
+
+    CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyDescriptorSet ) );
+      return *this;
+    }
+
+    CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSet = srcSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBinding = srcBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcArrayElement = srcArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+
+    operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+    }
+
+    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyDescriptorSet*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyDescriptorSet const& ) const = default;
+#else
+    bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSet == rhs.srcSet )
+          && ( srcBinding == rhs.srcBinding )
+          && ( srcArrayElement == rhs.srcArrayElement )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
+    uint32_t srcBinding = {};
+    uint32_t srcArrayElement = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+
+  };
+  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyDescriptorSet>
+  {
+    using Type = CopyDescriptorSet;
+  };
+
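+  // Usage sketch: CopyDescriptorSet is consumed by vkUpdateDescriptorSets, which takes write and
+  // copy arrays side by side. `device`, `srcSet` and `dstSet` are hypothetical caller handles.
+  //
+  //   vk::CopyDescriptorSet copy;
+  //   copy.setSrcSet( srcSet ).setSrcBinding( 0 )
+  //       .setDstSet( dstSet ).setDstBinding( 0 )
+  //       .setDescriptorCount( 1 );
+  //   const VkCopyDescriptorSet & cCopy = copy;
+  //   vkUpdateDescriptorSets( device, 0, nullptr, 1, &cCopy );
+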
+  struct ImageCopy2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageCopy2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2KHR const *>( &rhs );
+      return *this;
+    }
+
+    ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageCopy2KHR ) );
+      return *this;
+    }
+
+    ImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageCopy2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageCopy2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
     {
       extent = extent_;
       return *this;
     }
 
-    ImageCreateInfo& setMipLevels( uint32_t mipLevels_ )
+
+    operator VkImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      mipLevels = mipLevels_;
-      return *this;
+      return *reinterpret_cast<const VkImageCopy2KHR*>( this );
     }
 
-    ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ )
+    operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
     {
-      arrayLayers = arrayLayers_;
-      return *this;
+      return *reinterpret_cast<VkImageCopy2KHR*>( this );
     }
 
-    ImageCreateInfo& setSamples( SampleCountFlagBits samples_ )
-    {
-      samples = samples_;
-      return *this;
-    }
 
-    ImageCreateInfo& setTiling( ImageTiling tiling_ )
-    {
-      tiling = tiling_;
-      return *this;
-    }
-
-    ImageCreateInfo& setUsage( ImageUsageFlags usage_ )
-    {
-      usage = usage_;
-      return *this;
-    }
-
-    ImageCreateInfo& setSharingMode( SharingMode sharingMode_ )
-    {
-      sharingMode = sharingMode_;
-      return *this;
-    }
-
-    ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
-    {
-      queueFamilyIndexCount = queueFamilyIndexCount_;
-      return *this;
-    }
-
-    ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
-    {
-      pQueueFamilyIndices = pQueueFamilyIndices_;
-      return *this;
-    }
-
-    ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ )
-    {
-      initialLayout = initialLayout_;
-      return *this;
-    }
-
-    operator VkImageCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkImageCreateInfo*>(this);
-    }
-
-    operator VkImageCreateInfo &()
-    {
-      return *reinterpret_cast<VkImageCreateInfo*>(this);
-    }
-
-    bool operator==( ImageCreateInfo const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageCopy2KHR const& ) const = default;
+#else
+    bool operator==( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( imageType == rhs.imageType )
-          && ( format == rhs.format )
-          && ( extent == rhs.extent )
-          && ( mipLevels == rhs.mipLevels )
-          && ( arrayLayers == rhs.arrayLayers )
-          && ( samples == rhs.samples )
-          && ( tiling == rhs.tiling )
-          && ( usage == rhs.usage )
-          && ( sharingMode == rhs.sharingMode )
-          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
-          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
-          && ( initialLayout == rhs.initialLayout );
+          && ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
     }
 
-    bool operator!=( ImageCreateInfo const& rhs ) const
+    bool operator!=( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eImageCreateInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    ImageCreateFlags flags;
-    ImageType imageType;
-    Format format;
-    Extent3D extent;
-    uint32_t mipLevels;
-    uint32_t arrayLayers;
-    SampleCountFlagBits samples;
-    ImageTiling tiling;
-    ImageUsageFlags usage;
-    SharingMode sharingMode;
-    uint32_t queueFamilyIndexCount;
-    const uint32_t* pQueueFamilyIndices;
-    ImageLayout initialLayout;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
   };
-  static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
 
-  struct PipelineMultisampleStateCreateInfo
+  template <>
+  struct CppType<StructureType, StructureType::eImageCopy2KHR>
   {
-    PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(),
-                                        SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1,
-                                        Bool32 sampleShadingEnable_ = 0,
-                                        float minSampleShading_ = 0,
-                                        const SampleMask* pSampleMask_ = nullptr,
-                                        Bool32 alphaToCoverageEnable_ = 0,
-                                        Bool32 alphaToOneEnable_ = 0 )
-      : flags( flags_ )
-      , rasterizationSamples( rasterizationSamples_ )
-      , sampleShadingEnable( sampleShadingEnable_ )
-      , minSampleShading( minSampleShading_ )
-      , pSampleMask( pSampleMask_ )
-      , alphaToCoverageEnable( alphaToCoverageEnable_ )
-      , alphaToOneEnable( alphaToOneEnable_ )
+    using Type = ImageCopy2KHR;
+  };
+
+  struct CopyImageInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ )
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs )
+    CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR const *>( &rhs );
       return *this;
     }
-    PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ )
+
+    CopyImageInfo2KHR & operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyImageInfo2KHR ) );
+      return *this;
+    }
+
+    CopyImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ )
+    CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkCopyImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyImageInfo2KHR*>( this );
+    }
+
+    operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyImageInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyImageInfo2KHR const& ) const = default;
+#else
+    bool operator==( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions = {};
+
+  };
+  static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyImageInfo2KHR>
+  {
+    using Type = CopyImageInfo2KHR;
+  };
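+
+  // Editor's note: illustrative sketch only (not generated code). CopyImageInfo2KHR bundles the
+  // parameters of vkCmdCopyImage2KHR from VK_KHR_copy_commands2. Assuming that extension is
+  // enabled and `cmd`, `srcImage`, `dstImage` and `region` (an ImageCopy2KHR) are valid:
+  //
+  //   auto info = vk::CopyImageInfo2KHR()
+  //                 .setSrcImage( srcImage ).setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
+  //                 .setDstImage( dstImage ).setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
+  //                 .setRegions( region );        // enhanced mode fills regionCount / pRegions
+  //   cmd.copyImage2KHR( info );                  // wraps vkCmdCopyImage2KHR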
+
+  struct CopyImageToBufferInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyImageToBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    CopyImageToBufferInfo2KHR & operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyImageToBufferInfo2KHR ) );
+      return *this;
+    }
+
+    CopyImageToBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    CopyImageToBufferInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBuffer = dstBuffer_;
+      return *this;
+    }
+
+    CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    CopyImageToBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    CopyImageToBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkCopyImageToBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( this );
+    }
+
+    operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyImageToBufferInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyImageToBufferInfo2KHR const& ) const = default;
+#else
+    bool operator==( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstBuffer == rhs.dstBuffer )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
+
+  };
+  static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyImageToBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2KHR>
+  {
+    using Type = CopyImageToBufferInfo2KHR;
+  };
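+
+  // Editor's note: illustrative sketch only. CopyImageToBufferInfo2KHR is the "2KHR" form of an
+  // image-to-buffer readback and is filled the same way as CopyImageInfo2KHR above, assuming
+  // VK_KHR_copy_commands2 is enabled and `bufferRegion` is a BufferImageCopy2KHR:
+  //
+  //   cmd.copyImageToBuffer2KHR( vk::CopyImageToBufferInfo2KHR()
+  //                                .setSrcImage( srcImage )
+  //                                .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
+  //                                .setDstBuffer( readbackBuffer )
+  //                                .setRegions( bufferRegion ) );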
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct CopyMemoryToAccelerationStructureInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
+    : src( src_ ), dst( dst_ ), mode( mode_ )
+    {}
+
+    CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( CopyMemoryToAccelerationStructureInfoKHR ) );
+      return *this;
+    }
+
+    CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+
+    operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+    }
+
+    operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+
+  };
+  static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyMemoryToAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
+  {
+    using Type = CopyMemoryToAccelerationStructureInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct D3D12FenceSubmitInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
+    : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
+    : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+      pWaitSemaphoreValues = waitSemaphoreValues_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+      pSignalSemaphoreValues = signalSemaphoreValues_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
+#else
+    bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+    }
+
+    bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+    const void* pNext = {};
+    uint32_t waitSemaphoreValuesCount = {};
+    const uint64_t* pWaitSemaphoreValues = {};
+    uint32_t signalSemaphoreValuesCount = {};
+    const uint64_t* pSignalSemaphoreValues = {};
+
+  };
+  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
+  {
+    using Type = D3D12FenceSubmitInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
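+
+  // Editor's note: illustrative sketch only. D3D12FenceSubmitInfoKHR (Win32 only, from
+  // VK_KHR_external_semaphore_win32) is not passed to a command directly; it extends VkSubmitInfo
+  // through the pNext chain to supply the D3D12 fence values that imported semaphores wait on or
+  // signal. A minimal sketch, assuming `submitInfo` is a vk::SubmitInfo and `waitValue` /
+  // `signalValue` are uint64_t fence values:
+  //
+  //   vk::D3D12FenceSubmitInfoKHR fenceValues;
+  //   fenceValues.setWaitSemaphoreValues( waitValue )      // enhanced mode sets count + pointer
+  //              .setSignalSemaphoreValues( signalValue );
+  //   submitInfo.setPNext( &fenceValues );                 // chain ahead of queue.submit(...)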
+
+  struct DebugMarkerMarkerInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char* pMarkerName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
+    : pMarkerName( pMarkerName_ ), color( color_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMarkerName = pMarkerName_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+
+    operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pMarkerName == rhs.pMarkerName )
+          && ( color == rhs.color );
+    }
+
+    bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+    const void* pNext = {};
+    const char* pMarkerName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+
+  };
+  static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
+  {
+    using Type = DebugMarkerMarkerInfoEXT;
+  };
+
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+
+    operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    const char* pObjectName = {};
+
+  };
+  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
+  {
+    using Type = DebugMarkerObjectNameInfoEXT;
+  };
+
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
+    : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof(T);
+      pTag = tag_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void* pTag = {};
+
+  };
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
+  {
+    using Type = DebugMarkerObjectTagInfoEXT;
+  };
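+
+  // Editor's note: illustrative sketch only. The templated setTag() above is the enhanced-mode
+  // convenience: it derives tagSize from the element count times sizeof(T) and points pTag at the
+  // array, so arbitrary POD payloads can be attached without manual byte counting. Assuming
+  // VK_EXT_debug_marker is enabled and `pipelineHandle` is the raw uint64_t handle value:
+  //
+  //   std::array<uint32_t, 4> payload = { 1, 2, 3, 4 };
+  //   vk::DebugMarkerObjectTagInfoEXT tagInfo;
+  //   tagInfo.setObjectType( vk::DebugReportObjectTypeEXT::ePipeline )
+  //          .setObject( pipelineHandle )
+  //          .setTagName( 0x1234 )
+  //          .setTag( payload );                  // tagSize = 4 * sizeof(uint32_t)
+  //   device.debugMarkerSetObjectTagEXT( tagInfo );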
+
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ )
+    DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+
+    operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
+    PFN_vkDebugReportCallbackEXT pfnCallback = {};
+    void* pUserData = {};
+
+  };
+  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
+  {
+    using Type = DebugReportCallbackCreateInfoEXT;
+  };
+
+  struct DebugUtilsLabelEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
+    : pLabelName( pLabelName_ ), color( color_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsLabelEXT ) );
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLabelName = pLabelName_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+
+    operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
+    }
+
+    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pLabelName == rhs.pLabelName )
+          && ( color == rhs.color );
+    }
+
+    bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+    const void* pNext = {};
+    const char* pLabelName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+
+  };
+  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
+  {
+    using Type = DebugUtilsLabelEXT;
+  };
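+
+  // Editor's note: illustrative sketch only. DebugUtilsLabelEXT names a region or point in a queue
+  // or command buffer for validation layers and capture tools (VK_EXT_debug_utils). Assuming the
+  // extension is enabled on the instance and `cmd` is a vk::CommandBuffer:
+  //
+  //   vk::DebugUtilsLabelEXT label;
+  //   label.setPLabelName( "shadow pass" ).setColor( { 0.2f, 0.2f, 0.8f, 1.0f } );
+  //   cmd.beginDebugUtilsLabelEXT( label );
+  //   // ... record the pass ...
+  //   cmd.endDebugUtilsLabelEXT();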
+
+  struct DebugUtilsObjectNameInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) );
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+
+    operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    const char* pObjectName = {};
+
+  };
+  static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
+  {
+    using Type = DebugUtilsObjectNameInfoEXT;
+  };
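+
+  // Editor's note: illustrative sketch only. DebugUtilsObjectNameInfoEXT attaches a human-readable
+  // name to any Vulkan handle so validation messages and captures can refer to it. objectHandle is
+  // the raw handle value, so the C++ wrapper handle has to be cast down first. Assuming
+  // VK_EXT_debug_utils is enabled and `buffer` is a vk::Buffer:
+  //
+  //   vk::DebugUtilsObjectNameInfoEXT nameInfo;
+  //   nameInfo.setObjectType( vk::ObjectType::eBuffer )
+  //           .setObjectHandle( (uint64_t)static_cast<VkBuffer>( buffer ) )
+  //           .setPObjectName( "staging buffer" );
+  //   device.setDebugUtilsObjectNameEXT( nameInfo );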
+
+  struct DebugUtilsMessengerCallbackDataEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char* pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char* pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char* pMessageIdName_, int32_t messageIdNumber_, const char* pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {} )
+    : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) );
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessageIdName = pMessageIdName_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageIdNumber = messageIdNumber_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessage = pMessage_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = queueLabelCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueLabels = pQueueLabels_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
+      pQueueLabels = queueLabels_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = cmdBufLabelCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCmdBufLabels = pCmdBufLabels_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
+      pCmdBufLabels = cmdBufLabels_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = objectCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjects = pObjects_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = static_cast<uint32_t>( objects_.size() );
+      pObjects = objects_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pMessageIdName == rhs.pMessageIdName )
+          && ( messageIdNumber == rhs.messageIdNumber )
+          && ( pMessage == rhs.pMessage )
+          && ( queueLabelCount == rhs.queueLabelCount )
+          && ( pQueueLabels == rhs.pQueueLabels )
+          && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
+          && ( pCmdBufLabels == rhs.pCmdBufLabels )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjects == rhs.pObjects );
+    }
+
+    bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
+    const char* pMessageIdName = {};
+    int32_t messageIdNumber = {};
+    const char* pMessage = {};
+    uint32_t queueLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {};
+    uint32_t cmdBufLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {};
+    uint32_t objectCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {};
+
+  };
+  static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
+  {
+    using Type = DebugUtilsMessengerCallbackDataEXT;
+  };
+
+  struct DebugUtilsMessengerCreateInfoEXT
+  {
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) );
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageSeverity = messageSeverity_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageType = messageType_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+
+    operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( messageSeverity == rhs.messageSeverity )
+          && ( messageType == rhs.messageType )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
+    void* pUserData = {};
+
+  };
+  static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
+  {
+    using Type = DebugUtilsMessengerCreateInfoEXT;
+  };
+
+  struct DebugUtilsObjectTagInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
+    : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
+    : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) );
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof(T);
+      pTag = tag_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void* pTag = {};
+
+  };
+  static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
+  {
+    using Type = DebugUtilsObjectTagInfoEXT;
+  };
+
+  struct DedicatedAllocationBufferCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : dedicatedAllocation( dedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
+      return *this;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+
+
+    operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default;
+#else
+    bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+    }
+
+    bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+
+  };
+  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
+  {
+    using Type = DedicatedAllocationBufferCreateInfoNV;
+  };
+
+  struct DedicatedAllocationImageCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : dedicatedAllocation( dedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
+      return *this;
+    }
+
+    DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+
+
+    operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default;
+#else
+    bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+    }
+
+    bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+
+  };
+  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
+  {
+    using Type = DedicatedAllocationImageCreateInfoNV;
+  };
+
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default;
+#else
+    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
+  {
+    using Type = DedicatedAllocationMemoryAllocateInfoNV;
+  };
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  class DeferredOperationKHR
+  {
+  public:
+    using CType = VkDeferredOperationKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT
+      : m_deferredOperationKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_deferredOperationKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
+      : m_deferredOperationKHR( deferredOperationKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_deferredOperationKHR = deferredOperationKHR;
+      return *this;
+    }
+#endif
+
+    DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deferredOperationKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeferredOperationKHR const& ) const = default;
+#else
+    bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR == rhs.m_deferredOperationKHR;
+    }
+
+    bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR != rhs.m_deferredOperationKHR;
+    }
+
+    bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR < rhs.m_deferredOperationKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deferredOperationKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeferredOperationKHR m_deferredOperationKHR;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDeferredOperationKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct DeferredOperationInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeferredOperationInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ = {}) VULKAN_HPP_NOEXCEPT
+    : operationHandle( operationHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeferredOperationInfoKHR( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeferredOperationInfoKHR & operator=( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeferredOperationInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    DeferredOperationInfoKHR & operator=( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeferredOperationInfoKHR ) );
+      return *this;
+    }
+
+    DeferredOperationInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeferredOperationInfoKHR & setOperationHandle( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      operationHandle = operationHandle_;
+      return *this;
+    }
+
+
+    operator VkDeferredOperationInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeferredOperationInfoKHR*>( this );
+    }
+
+    operator VkDeferredOperationInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeferredOperationInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeferredOperationInfoKHR const& ) const = default;
+#else
+    bool operator==( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( operationHandle == rhs.operationHandle );
+    }
+
+    bool operator!=( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeferredOperationInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle = {};
+
+  };
+  static_assert( sizeof( DeferredOperationInfoKHR ) == sizeof( VkDeferredOperationInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeferredOperationInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeferredOperationInfoKHR>
+  {
+    using Type = DeferredOperationInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct DescriptorBufferInfo
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ ), range( range_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorBufferInfo ) );
+      return *this;
+    }
+
+    DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+
+    operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
+    }
+
+    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorBufferInfo const& ) const = default;
+#else
+    bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+
+  };
+  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+
+  class Sampler
+  {
+  public:
+    using CType = VkSampler;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+      : m_sampler( sampler )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = sampler;
+      return *this;
+    }
+#endif
+
+    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Sampler const& ) const = default;
+#else
+    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == rhs.m_sampler;
+    }
+
+    bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != rhs.m_sampler;
+    }
+
+    bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler < rhs.m_sampler;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSampler m_sampler;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSampler>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class ImageView
+  {
+  public:
+    using CType = VkImageView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
+      : m_imageView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_imageView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
+      : m_imageView( imageView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
+    {
+      m_imageView = imageView;
+      return *this;
+    }
+#endif
+
+    ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_imageView = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageView const& ) const = default;
+#else
+    bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView == rhs.m_imageView;
+    }
+
+    bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView != rhs.m_imageView;
+    }
+
+    bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView < rhs.m_imageView;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImageView m_imageView;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImageView>
+  {
+    using type = VULKAN_HPP_NAMESPACE::ImageView;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ImageView;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ImageView;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DescriptorImageInfo
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorImageInfo ) );
+      return *this;
+    }
+
+    DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+
+    operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
+    }
+
+    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorImageInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorImageInfo const& ) const = default;
+#else
+    bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sampler == rhs.sampler )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout );
+    }
+
+    bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorPoolSize
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), descriptorCount( descriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorPoolSize ) );
+      return *this;
+    }
+
+    DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+
+    operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
+    }
+
+    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolSize*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPoolSize const& ) const = default;
+#else
+    bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+
+  };
+  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorPoolCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
+    : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorPoolCreateInfo ) );
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSets = maxSets_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = poolSizeCount_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPoolSizes = pPoolSizes_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
+      pPoolSizes = poolSizes_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
+    }
+
+    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPoolCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( maxSets == rhs.maxSets )
+          && ( poolSizeCount == rhs.poolSizeCount )
+          && ( pPoolSizes == rhs.pPoolSizes );
+    }
+
+    bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
+    uint32_t maxSets = {};
+    uint32_t poolSizeCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {};
+
+  };
+  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
+  {
+    using Type = DescriptorPoolCreateInfo;
+  };
+
+  struct DescriptorPoolInlineUniformBlockCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT(uint32_t maxInlineUniformBlockBindings_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) );
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
+      return *this;
+    }
+
+
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+    }
+
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
+    }
+
+    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+    const void* pNext = {};
+    uint32_t maxInlineUniformBlockBindings = {};
+
+  };
+  static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT>
+  {
+    using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT;
+  };
+
+  class DescriptorPool
+  {
+  public:
+    using CType = VkDescriptorPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool( descriptorPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = descriptorPool;
+      return *this;
+    }
+#endif
+
+    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPool const& ) const = default;
+#else
+    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == rhs.m_descriptorPool;
+    }
+
+    bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != rhs.m_descriptorPool;
+    }
+
+    bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool < rhs.m_descriptorPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorPool m_descriptorPool;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorPool>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DescriptorSetLayout
+  {
+  public:
+    using CType = VkDescriptorSetLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout( descriptorSetLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = descriptorSetLayout;
+      return *this;
+    }
+#endif
+
+    DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayout const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+    }
+
+    bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+    }
+
+    bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSetLayout m_descriptorSetLayout;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSetLayout>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DescriptorSetAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}) VULKAN_HPP_NOEXCEPT
+    : descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ )
+    : descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetAllocateInfo ) );
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorPool = descriptorPool_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts = setLayouts_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
+    }
+
+    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetAllocateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorPool == rhs.descriptorPool )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pSetLayouts == rhs.pSetLayouts );
+    }
+
+    bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
+    uint32_t descriptorSetCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+
+  };
+  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
+  {
+    using Type = DescriptorSetAllocateInfo;
+  };
+
+  struct DescriptorSetLayoutBinding
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
+    : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutBinding ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImmutableSamplers = pImmutableSamplers_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
+      pImmutableSamplers = immutableSamplers_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutBinding const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( descriptorType == rhs.descriptorType )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( stageFlags == rhs.stageFlags )
+          && ( pImmutableSamplers == rhs.pImmutableSamplers );
+    }
+
+    bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {};
+
+  };
+  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetLayoutBindingFlagsCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ )
+    : bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindingFlags = pBindingFlags_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
+      pBindingFlags = bindingFlags_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindingFlags == rhs.pBindingFlags );
+    }
+
+    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+    const void* pNext = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {};
+
+  };
+  static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
+  {
+    using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
+  };
+  using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
+
+  struct DescriptorSetLayoutCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ )
+    : flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindings = pBindings_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = static_cast<uint32_t>( bindings_.size() );
+      pBindings = bindings_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindings == rhs.pBindings );
+    }
+
+    bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {};
+
+  };
+  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
+  {
+    using Type = DescriptorSetLayoutCreateInfo;
+  };
+
+  struct DescriptorSetLayoutSupport
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT
+    : supported( supported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetLayoutSupport ) );
+      return *this;
+    }
+
+
+    operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
+    }
+
+    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutSupport const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supported == rhs.supported );
+    }
+
+    bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supported = {};
+
+  };
+  static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
+  {
+    using Type = DescriptorSetLayoutSupport;
+  };
+  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
+
+  struct DescriptorSetVariableDescriptorCountAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t* pDescriptorCounts_ = {}) VULKAN_HPP_NOEXCEPT
+    : descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
+    : descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) );
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorCounts = pDescriptorCounts_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
+      pDescriptorCounts = descriptorCounts_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pDescriptorCounts == rhs.pDescriptorCounts );
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+    const void* pNext = {};
+    uint32_t descriptorSetCount = {};
+    const uint32_t* pDescriptorCounts = {};
+
+  };
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
+  {
+    using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
+  };
+  using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
+
+  struct DescriptorSetVariableDescriptorCountLayoutSupport
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) );
+      return *this;
+    }
+
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default;
+#else
+    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+    void* pNext = {};
+    uint32_t maxVariableDescriptorCount = {};
+
+  };
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
+  {
+    using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
+  };
+  using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
+
+  struct DescriptorUpdateTemplateEntry
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
+    : dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorUpdateTemplateEntry ) );
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+
+    operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    size_t offset = {};
+    size_t stride = {};
+
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
+
+  struct DescriptorUpdateTemplateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {} )
+    : flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) );
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
+      pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      templateType = templateType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetLayout = descriptorSetLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
+    {
+      set = set_;
+      return *this;
+    }
+
+
+    operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
+          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
+          && ( templateType == rhs.templateType )
+          && ( descriptorSetLayout == rhs.descriptorSetLayout )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( set == rhs.set );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
+    uint32_t descriptorUpdateEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+    uint32_t set = {};
+
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
+  {
+    using Type = DescriptorUpdateTemplateCreateInfo;
+  };
+  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
+
+  struct DeviceQueueCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float* pQueuePriorities_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceQueueCreateInfo ) );
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = queueCount_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueuePriorities = pQueuePriorities_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = static_cast<uint32_t>( queuePriorities_.size() );
+      pQueuePriorities = queuePriorities_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
+    }
+
+    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueCreateInfo const& ) const = default;
+#else
+    bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueCount == rhs.queueCount )
+          && ( pQueuePriorities == rhs.pQueuePriorities );
+    }
+
+    bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueCount = {};
+    const float* pQueuePriorities = {};
+
+  };
+  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
+  {
+    using Type = DeviceQueueCreateInfo;
+  };
+
+  struct PhysicalDeviceFeatures
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
+    : robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess = robustBufferAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullDrawIndexUint32 = fullDrawIndexUint32_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCubeArray = imageCubeArray_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      independentBlend = independentBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryShader = geometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationShader = tessellationShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleRateShading = sampleRateShading_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dualSrcBlend = dualSrcBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiDrawIndirect = multiDrawIndirect_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectFirstInstance = drawIndirectFirstInstance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClamp = depthClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fillModeNonSolid = fillModeNonSolid_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBounds = depthBounds_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      wideLines = wideLines_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      largePoints = largePoints_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToOne = alphaToOne_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiViewport = multiViewport_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerAnisotropy = samplerAnisotropy_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionETC2 = textureCompressionETC2_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionBC = textureCompressionBC_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryPrecise = occlusionQueryPrecise_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatisticsQuery = pipelineStatisticsQuery_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageGatherExtended = shaderImageGatherExtended_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageMultisample = shaderStorageImageMultisample_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderClipDistance = shaderClipDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderCullDistance = shaderCullDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat64 = shaderFloat64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt64 = shaderInt64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt16 = shaderInt16_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceResidency = shaderResourceResidency_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceMinLod = shaderResourceMinLod_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseBinding = sparseBinding_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyBuffer = sparseResidencyBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage2D = sparseResidencyImage2D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage3D = sparseResidencyImage3D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency2Samples = sparseResidency2Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency4Samples = sparseResidency4Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency8Samples = sparseResidency8Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency16Samples = sparseResidency16Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyAliased = sparseResidencyAliased_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variableMultisampleRate = variableMultisampleRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedQueries = inheritedQueries_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( robustBufferAccess == rhs.robustBufferAccess )
+          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
+          && ( imageCubeArray == rhs.imageCubeArray )
+          && ( independentBlend == rhs.independentBlend )
+          && ( geometryShader == rhs.geometryShader )
+          && ( tessellationShader == rhs.tessellationShader )
+          && ( sampleRateShading == rhs.sampleRateShading )
+          && ( dualSrcBlend == rhs.dualSrcBlend )
+          && ( logicOp == rhs.logicOp )
+          && ( multiDrawIndirect == rhs.multiDrawIndirect )
+          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
+          && ( depthClamp == rhs.depthClamp )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( fillModeNonSolid == rhs.fillModeNonSolid )
+          && ( depthBounds == rhs.depthBounds )
+          && ( wideLines == rhs.wideLines )
+          && ( largePoints == rhs.largePoints )
+          && ( alphaToOne == rhs.alphaToOne )
+          && ( multiViewport == rhs.multiViewport )
+          && ( samplerAnisotropy == rhs.samplerAnisotropy )
+          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
+          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
+          && ( textureCompressionBC == rhs.textureCompressionBC )
+          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
+          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
+          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
+          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
+          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
+          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
+          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
+          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
+          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
+          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
+          && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
+          && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
+          && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
+          && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
+          && ( shaderClipDistance == rhs.shaderClipDistance )
+          && ( shaderCullDistance == rhs.shaderCullDistance )
+          && ( shaderFloat64 == rhs.shaderFloat64 )
+          && ( shaderInt64 == rhs.shaderInt64 )
+          && ( shaderInt16 == rhs.shaderInt16 )
+          && ( shaderResourceResidency == rhs.shaderResourceResidency )
+          && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
+          && ( sparseBinding == rhs.sparseBinding )
+          && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
+          && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
+          && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
+          && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
+          && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
+          && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
+          && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
+          && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
+          && ( variableMultisampleRate == rhs.variableMultisampleRate )
+          && ( inheritedQueries == rhs.inheritedQueries );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
+    VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} )
+    : flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceCreateInfo ) );
+      return *this;
+    }
+
+    DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = queueCreateInfoCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueCreateInfos = pQueueCreateInfos_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
+      pQueueCreateInfos = queueCreateInfos_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+      ppEnabledLayerNames = pEnabledLayerNames_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledFeatures = pEnabledFeatures_;
+      return *this;
+    }
+
+
+    operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
+    }
+
+    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceCreateInfo const& ) const = default;
+#else
+    bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
+          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
+          && ( pEnabledFeatures == rhs.pEnabledFeatures );
+    }
+
+    bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
+    uint32_t queueCreateInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
+    uint32_t enabledLayerCount = {};
+    const char* const * ppEnabledLayerNames = {};
+    uint32_t enabledExtensionCount = {};
+    const char* const * ppEnabledExtensionNames = {};
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
+
+  };
+  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceCreateInfo>
+  {
+    using Type = DeviceCreateInfo;
+  };
+
+  struct DeviceDeviceMemoryReportCreateInfoEXT
+  {
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) );
+      return *this;
+    }
+
+    DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
+
+    DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+
+    operator VkDeviceDeviceMemoryReportCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+    }
+
+    operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
+    PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {};
+    void* pUserData = {};
+
+  };
+  static_assert( sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceDeviceMemoryReportCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
+  {
+    using Type = DeviceDeviceMemoryReportCreateInfoEXT;
+  };
+
+  struct DeviceDiagnosticsConfigCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceDiagnosticsConfigCreateInfoNV ) );
+      return *this;
+    }
+
+    DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+    }
+
+    operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default;
+#else
+    bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
+
+  };
+  static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
+  {
+    using Type = DeviceDiagnosticsConfigCreateInfoNV;
+  };
+
+  struct DeviceEventInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug) VULKAN_HPP_NOEXCEPT
+    : deviceEvent( deviceEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceEventInfoEXT ) );
+      return *this;
+    }
+
+    DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceEvent = deviceEvent_;
+      return *this;
+    }
+
+
+    operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
+    }
+
+    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceEventInfoEXT const& ) const = default;
+#else
+    bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceEvent == rhs.deviceEvent );
+    }
+
+    bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
+
+  };
+  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
+  {
+    using Type = DeviceEventInfoEXT;
+  };
+
+  struct DeviceGroupBindSparseInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupBindSparseInfo ) );
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceDeviceIndex = resourceDeviceIndex_;
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryDeviceIndex = memoryDeviceIndex_;
+      return *this;
+    }
+
+
+    operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
+    }
+
+    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
+          && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+    }
+
+    bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+    const void* pNext = {};
+    uint32_t resourceDeviceIndex = {};
+    uint32_t memoryDeviceIndex = {};
+
+  };
+  static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
+  {
+    using Type = DeviceGroupBindSparseInfo;
+  };
+  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
+
+  struct DeviceGroupCommandBufferBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) );
+      return *this;
+    }
+
+    DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+
+    operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
+
+    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+    const void* pNext = {};
+    uint32_t deviceMask = {};
+
+  };
+  static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
+  {
+    using Type = DeviceGroupCommandBufferBeginInfo;
+  };
+  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+
+  class DisplayKHR
+  {
+  public:
+    using CType = VkDisplayKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR( displayKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = displayKHR;
+      return *this;
+    }
+#endif
+
+    DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayKHR const& ) const = default;
+#else
+    bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == rhs.m_displayKHR;
+    }
+
+    bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != rhs.m_displayKHR;
+    }
+
+    bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR < rhs.m_displayKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayKHR m_displayKHR;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct PerformanceConfigurationAcquireInfoINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated) VULKAN_HPP_NOEXCEPT
+    : type( type_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceConfigurationAcquireInfoINTEL ) );
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type );
+    }
+
+    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
+
+  };
+  static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
+  {
+    using Type = PerformanceConfigurationAcquireInfoINTEL;
+  };
+
+  class PerformanceConfigurationINTEL
+  {
+  public:
+    using CType = VkPerformanceConfigurationINTEL;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = performanceConfigurationINTEL;
+      return *this;
+    }
+#endif
+
+    PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceConfigurationINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
+    }
+
+    bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
+    }
+
+    bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePerformanceConfigurationINTEL>
+  {
+    using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class QueryPool
+  {
+  public:
+    using CType = VkQueryPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
+      : m_queryPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_queryPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
+      : m_queryPool( queryPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = queryPool;
+      return *this;
+    }
+#endif
+
+    QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPool const& ) const = default;
+#else
+    bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == rhs.m_queryPool;
+    }
+
+    bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != rhs.m_queryPool;
+    }
+
+    bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool < rhs.m_queryPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueryPool m_queryPool;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueryPool>
+  {
+    using type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct RenderPassBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
+    : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+    : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassBeginInfo ) );
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pClearValues = pClearValues_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
+      pClearValues = clearValues_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
+    }
+
+    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassBeginInfo const& ) const = default;
+#else
+    bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( renderArea == rhs.renderArea )
+          && ( clearValueCount == rhs.clearValueCount )
+          && ( pClearValues == rhs.pClearValues );
+    }
+
+    bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+    uint32_t clearValueCount = {};
+    const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
+
+  };
+  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+  {
+    using Type = RenderPassBeginInfo;
+  };
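+
+  // Usage sketch ("renderPass", "framebuffer", "extent" and "clearValues" are assumed to
+  // exist in calling code): the fluent setters above can be chained, and in enhanced mode
+  // setClearValues() fills clearValueCount / pClearValues from a single container:
+  //
+  //   vk::RenderPassBeginInfo beginInfo = vk::RenderPassBeginInfo()
+  //                                         .setRenderPass( renderPass )
+  //                                         .setFramebuffer( framebuffer )
+  //                                         .setRenderArea( vk::Rect2D( {}, extent ) )
+  //                                         .setClearValues( clearValues );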
+
+  struct SubpassBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
+    : contents( contents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassBeginInfo ) );
+      return *this;
+    }
+
+    SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+    {
+      contents = contents_;
+      return *this;
+    }
+
+
+    operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
+    }
+
+    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassBeginInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassBeginInfo const& ) const = default;
+#else
+    bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( contents == rhs.contents );
+    }
+
+    bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
+
+  };
+  static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassBeginInfo>
+  {
+    using Type = SubpassBeginInfo;
+  };
+  using SubpassBeginInfoKHR = SubpassBeginInfo;
+
+  struct ImageBlit
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
+      return *this;
+    }
+
+    ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageBlit ) );
+      return *this;
+    }
+
+    ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffsets = srcOffsets_;
+      return *this;
+    }
+
+    ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffsets = dstOffsets_;
+      return *this;
+    }
+
+
+    operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageBlit*>( this );
+    }
+
+    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageBlit*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageBlit const& ) const = default;
+#else
+    bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffsets == rhs.srcOffsets )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffsets == rhs.dstOffsets );
+    }
+
+    bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
+
+  };
+  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
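+
+  // Usage sketch (the "srcWidth", "srcHeight", "dstWidth" and "dstHeight" values are
+  // assumed): each offsets array holds the two opposite corners of the blit region, so a
+  // scaled full-image blit of the first color mip level could be described as:
+  //
+  //   vk::ImageBlit region = vk::ImageBlit()
+  //     .setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+  //     .setSrcOffsets( { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( srcWidth, srcHeight, 1 ) } )
+  //     .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+  //     .setDstOffsets( { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( dstWidth, dstHeight, 1 ) } );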
+
+  struct ImageSubresourceRange
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
+      return *this;
+    }
+
+    ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSubresourceRange ) );
+      return *this;
+    }
+
+    ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseMipLevel = baseMipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      levelCount = levelCount_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+
+    operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceRange*>( this );
+    }
+
+    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceRange*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSubresourceRange const& ) const = default;
+#else
+    bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( baseMipLevel == rhs.baseMipLevel )
+          && ( levelCount == rhs.levelCount )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t baseMipLevel = {};
+    uint32_t levelCount = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+
+  };
+  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
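+
+  // Usage sketch: a range covering every mip level and array layer of a color image can
+  // use the VK_REMAINING_* constants from vulkan.h for the two counts:
+  //
+  //   vk::ImageSubresourceRange fullRange( vk::ImageAspectFlagBits::eColor,
+  //                                        0, VK_REMAINING_MIP_LEVELS,
+  //                                        0, VK_REMAINING_ARRAY_LAYERS );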
+
+  struct ImageCopy
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
+      return *this;
+    }
+
+    ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageCopy ) );
+      return *this;
+    }
+
+    ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+
+    operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCopy*>( this );
+    }
+
+    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCopy*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageCopy const& ) const = default;
+#else
+    bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+  };
+  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassEndInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
+      return *this;
+    }
+
+    SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassEndInfo ) );
+      return *this;
+    }
+
+    SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+
+    operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassEndInfo*>( this );
+    }
+
+    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassEndInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassEndInfo const& ) const = default;
+#else
+    bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+    }
+
+    bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
+    const void* pNext = {};
+
+  };
+  static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassEndInfo>
+  {
+    using Type = SubpassEndInfo;
+  };
+  using SubpassEndInfoKHR = SubpassEndInfo;
+
+  class IndirectCommandsLayoutNV
+  {
+  public:
+    using CType = VkIndirectCommandsLayoutNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
+      return *this;
+    }
+#endif
+
+    IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNV = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsLayoutNV const& ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
+    }
+
+    bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
+    }
+
+    bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eIndirectCommandsLayoutNV>
+  {
+    using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct IndirectCommandsStreamNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
+      return *this;
+    }
+
+    IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( IndirectCommandsStreamNV ) );
+      return *this;
+    }
+
+    IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+
+    operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
+    }
+
+    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
+#else
+    bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+
+  };
+  static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct GeneratedCommandsInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
+    : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GeneratedCommandsInfoNV ) );
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = streamCount_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStreams = pStreams_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = static_cast<uint32_t>( streams_.size() );
+      pStreams = streams_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCount = sequencesCount_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessBuffer = preprocessBuffer_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessOffset = preprocessOffset_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preprocessSize = preprocessSize_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountBuffer = sequencesCountBuffer_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountOffset = sequencesCountOffset_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexBuffer = sequencesIndexBuffer_;
+      return *this;
+    }
+
+    GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexOffset = sequencesIndexOffset_;
+      return *this;
+    }
+
+
+    operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
+    }
+
+    operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeneratedCommandsInfoNV const& ) const = default;
+#else
+    bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipeline == rhs.pipeline )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( streamCount == rhs.streamCount )
+          && ( pStreams == rhs.pStreams )
+          && ( sequencesCount == rhs.sequencesCount )
+          && ( preprocessBuffer == rhs.preprocessBuffer )
+          && ( preprocessOffset == rhs.preprocessOffset )
+          && ( preprocessSize == rhs.preprocessSize )
+          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+          && ( sequencesCountOffset == rhs.sequencesCountOffset )
+          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+    }
+
+    bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+    uint32_t streamCount = {};
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {};
+    uint32_t sequencesCount = {};
+    VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
+
+  };
+  static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
+  {
+    using Type = GeneratedCommandsInfoNV;
+  };
+
+  struct MemoryBarrier
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+    MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryBarrier ) );
+      return *this;
+    }
+
+    MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+
+    operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryBarrier*>( this );
+    }
+
+    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryBarrier*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryBarrier const& ) const = default;
+#else
+    bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask );
+    }
+
+    bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+
+  };
+  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryBarrier>
+  {
+    using Type = MemoryBarrier;
+  };
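+
+  // Usage sketch: a global barrier making transfer writes visible to shader reads only
+  // needs the two access masks; everything else is fixed by the constructor defaults:
+  //
+  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
+  //                              vk::AccessFlagBits::eShaderRead );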
+
+  struct ImageMemoryBarrier
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+    ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageMemoryBarrier ) );
+      return *this;
+    }
+
+    ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+
+    operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+    }
+
+    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageMemoryBarrier const& ) const = default;
+#else
+    bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
+  };
+  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageMemoryBarrier>
+  {
+    using Type = ImageMemoryBarrier;
+  };
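+
+  // Usage sketch (the "image" handle is assumed to come from calling code): a typical
+  // layout transition recorded via CommandBuffer::pipelineBarrier fills this struct with
+  // the setters above, e.g. undefined -> transfer destination:
+  //
+  //   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
+  //     .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
+  //     .setOldLayout( vk::ImageLayout::eUndefined )
+  //     .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
+  //     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //     .setImage( image )
+  //     .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );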
+
+  class BufferView
+  {
+  public:
+    using CType = VkBufferView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
+
+  public:
+    VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView( bufferView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = bufferView;
+      return *this;
+    }
+#endif
+
+    BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferView const& ) const = default;
+#else
+    bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == rhs.m_bufferView;
+    }
+
+    bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != rhs.m_bufferView;
+    }
+
+    bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView < rhs.m_bufferView;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBufferView m_bufferView;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBufferView>
+  {
+    using type = VULKAN_HPP_NAMESPACE::BufferView;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::BufferView;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::BufferView;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct WriteDescriptorSet
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
+    : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
+    : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 );
+#else
+      if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( WriteDescriptorSet ) );
+      return *this;
+    }
+
+    WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageInfo = pImageInfo_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
+      pImageInfo = imageInfo_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferInfo = pBufferInfo_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
+      pBufferInfo = bufferInfo_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTexelBufferView = pTexelBufferView_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
+      pTexelBufferView = texelBufferView_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+    }
+
+    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSet*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WriteDescriptorSet const& ) const = default;
+#else
+    bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( pImageInfo == rhs.pImageInfo )
+          && ( pBufferInfo == rhs.pBufferInfo )
+          && ( pTexelBufferView == rhs.pTexelBufferView );
+    }
+
+    bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
+    const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
+
+  };
+  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
+  {
+    using Type = WriteDescriptorSet;
+  };
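+
+  // Usage sketch ("descriptorSet" and "bufferInfo" are assumed to exist in calling code):
+  // in enhanced mode the ArrayProxy constructor above derives descriptorCount from
+  // whichever of the three info arrays is non-empty, and exactly one of them must be:
+  //
+  //   vk::WriteDescriptorSet write( descriptorSet,
+  //                                 0 /*binding*/, 0 /*arrayElement*/,
+  //                                 vk::DescriptorType::eUniformBuffer,
+  //                                 {} /*imageInfo*/, bufferInfo );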
+
+  class DescriptorUpdateTemplate
+  {
+  public:
+    using CType = VkDescriptorUpdateTemplate;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+      return *this;
+    }
+#endif
+
+    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorUpdateTemplate const& ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorUpdateTemplate>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+
+  class Event
+  {
+  public:
+    using CType = VkEvent;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
+      : m_event(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_event(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
+      : m_event( event )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = event;
+      return *this;
+    }
+#endif
+
+    Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Event const& ) const = default;
+#else
+    bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == rhs.m_event;
+    }
+
+    bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != rhs.m_event;
+    }
+
+    bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event < rhs.m_event;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkEvent m_event;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eEvent>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct ImageResolve
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
+      return *this;
+    }
+
+    ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageResolve ) );
+      return *this;
+    }
+
+    ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+
+    operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve*>( this );
+    }
+
+    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageResolve const& ) const = default;
+#else
+    bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+  };
+  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageResolve2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
+      return *this;
+    }
+
+    ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageResolve2KHR ) );
+      return *this;
+    }
+
+    ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+
+    operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve2KHR*>( this );
+    }
+
+    operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageResolve2KHR const& ) const = default;
+#else
+    bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+
+  };
+  static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageResolve2KHR>
+  {
+    using Type = ImageResolve2KHR;
+  };
+
+  struct ResolveImageInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
+    : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ResolveImageInfo2KHR ) );
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkResolveImageInfo2KHR*>( this );
+    }
+
+    operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkResolveImageInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ResolveImageInfo2KHR const& ) const = default;
+#else
+    bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcImage == rhs.srcImage )
+          && ( srcImageLayout == rhs.srcImageLayout )
+          && ( dstImage == rhs.dstImage )
+          && ( dstImageLayout == rhs.dstImageLayout )
+          && ( regionCount == rhs.regionCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {};
+
+  };
+  static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
+  {
+    using Type = ResolveImageInfo2KHR;
+  };
+
+  struct PerformanceMarkerInfoINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+    : marker( marker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceMarkerInfoINTEL ) );
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+
+
+    operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+    }
+
+    bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+    const void* pNext = {};
+    uint64_t marker = {};
+
+  };
+  static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
+  {
+    using Type = PerformanceMarkerInfoINTEL;
+  };
+
+  struct PerformanceOverrideInfoINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), enable( enable_ ), parameter( parameter_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceOverrideInfoINTEL ) );
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enable = enable_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameter = parameter_;
+      return *this;
+    }
+
+
+    operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( enable == rhs.enable )
+          && ( parameter == rhs.parameter );
+    }
+
+    bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+    VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+    uint64_t parameter = {};
+
+  };
+  static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
+  {
+    using Type = PerformanceOverrideInfoINTEL;
+  };
+
+  struct PerformanceStreamMarkerInfoINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
+    : marker( marker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceStreamMarkerInfoINTEL ) );
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+
+
+    operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+    }
+
+    bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+    const void* pNext = {};
+    uint32_t marker = {};
+
+  };
+  static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
+  {
+    using Type = PerformanceStreamMarkerInfoINTEL;
+  };
+
+  struct Viewport
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
+      return *this;
+    }
+
+    Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Viewport ) );
+      return *this;
+    }
+
+    Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepth = minDepth_;
+      return *this;
+    }
+
+    Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepth = maxDepth_;
+      return *this;
+    }
+
+
+    operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewport*>( this );
+    }
+
+    operator VkViewport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewport*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Viewport const& ) const = default;
+#else
+    bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( minDepth == rhs.minDepth )
+          && ( maxDepth == rhs.maxDepth );
+    }
+
+    bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    float x = {};
+    float y = {};
+    float width = {};
+    float height = {};
+    float minDepth = {};
+    float maxDepth = {};
+
+  };
+  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShadingRatePaletteNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
+    : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
+      return *this;
+    }
+
+    ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ShadingRatePaletteNV ) );
+      return *this;
+    }
+
+    ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
+      return *this;
+    }
+
+    ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
+      pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
+    }
+
+    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShadingRatePaletteNV const& ) const = default;
+#else
+    bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
+          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+    }
+
+    bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t shadingRatePaletteEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
+
+  };
+  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ViewportWScalingNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
+    : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
+      return *this;
+    }
+
+    ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ViewportWScalingNV ) );
+      return *this;
+    }
+
+    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xcoeff = xcoeff_;
+      return *this;
+    }
+
+    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycoeff = ycoeff_;
+      return *this;
+    }
+
+
+    operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
+    }
+
+    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportWScalingNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ViewportWScalingNV const& ) const = default;
+#else
+    bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( xcoeff == rhs.xcoeff )
+          && ( ycoeff == rhs.ycoeff );
+    }
+
+    bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    float xcoeff = {};
+    float ycoeff = {};
+
+  };
+  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct StridedBufferRegionKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ ), offset( offset_ ), stride( stride_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( StridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StridedBufferRegionKHR( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    explicit StridedBufferRegionKHR( IndirectCommandsStreamNV const& indirectCommandsStreamNV, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} )
+      : buffer( indirectCommandsStreamNV.buffer )
+      , offset( indirectCommandsStreamNV.offset )
+      , stride( stride_ )
+      , size( size_ )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    StridedBufferRegionKHR & operator=( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR const *>( &rhs );
+      return *this;
+    }
+
+    StridedBufferRegionKHR & operator=( StridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( StridedBufferRegionKHR ) );
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    StridedBufferRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkStridedBufferRegionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStridedBufferRegionKHR*>( this );
+    }
+
+    operator VkStridedBufferRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStridedBufferRegionKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( StridedBufferRegionKHR const& ) const = default;
+#else
+    bool operator==( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+  static_assert( sizeof( StridedBufferRegionKHR ) == sizeof( VkStridedBufferRegionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StridedBufferRegionKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  class CommandBuffer
+  {
+  public:
+    using CType = VkCommandBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer( commandBuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = commandBuffer;
+      return *this;
+    }
+#endif
+
+    CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBuffer const& ) const = default;
+#else
+    bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == rhs.m_commandBuffer;
+    }
+
+    bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer != rhs.m_commandBuffer;
+    }
+
+    bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer < rhs.m_commandBuffer;
+    }
+#endif
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const & pOffsetInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerEndEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endConditionalRenderingEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setBlendConstants( const float blendConstants[4], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMask( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineWidth( float lineWidth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result end( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandBuffer m_commandBuffer;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandBuffer>
+  {
+    using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
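+  // Illustrative usage sketch (non-normative comment; `commandBuffer` and `queryPool` are
+  // hypothetical, already-created handles). The CommandBuffer members declared above forward
+  // to vkCmdWriteTimestamp / vkEndCommandBuffer / vkResetCommandBuffer via the dispatcher:
+  //
+  //   commandBuffer.writeTimestamp( vk::PipelineStageFlagBits::eBottomOfPipe, queryPool, 0 );
+  //   commandBuffer.end();    // enhanced mode returns void and throws vk::SystemError on failure
+  //   commandBuffer.reset( vk::CommandBufferResetFlagBits::eReleaseResources );
+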
+  struct MemoryAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : allocationSize( allocationSize_ ), memoryTypeIndex( memoryTypeIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryAllocateInfo ) );
+      return *this;
+    }
+
+    MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      allocationSize = allocationSize_;
+      return *this;
+    }
+
+    MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryTypeIndex = memoryTypeIndex_;
+      return *this;
+    }
+
+
+    operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
+    }
+
+    operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryAllocateInfo const& ) const = default;
+#else
+    bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allocationSize == rhs.allocationSize )
+          && ( memoryTypeIndex == rhs.memoryTypeIndex );
+    }
+
+    bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+    uint32_t memoryTypeIndex = {};
+
+  };
+  static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
+  {
+    using Type = MemoryAllocateInfo;
+  };
+
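+  // Illustrative usage sketch (non-normative comment; `requirements` stands for a hypothetical
+  // vk::MemoryRequirements and `2` for a memory type index chosen by the application). The
+  // constructor and the chainable setters fill the same fields:
+  //
+  //   vk::MemoryAllocateInfo allocInfo( requirements.size, 2 );
+  //   // or equivalently:
+  //   vk::MemoryAllocateInfo allocInfo2{};
+  //   allocInfo2.setAllocationSize( requirements.size ).setMemoryTypeIndex( 2 );
+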
+  class PipelineCache
+  {
+  public:
+    using CType = VkPipelineCache;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache( pipelineCache )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = pipelineCache;
+      return *this;
+    }
+#endif
+
+    PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCache const& ) const = default;
+#else
+    bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == rhs.m_pipelineCache;
+    }
+
+    bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != rhs.m_pipelineCache;
+    }
+
+    bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache < rhs.m_pipelineCache;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineCache m_pipelineCache;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipelineCache>
+  {
+    using type = VULKAN_HPP_NAMESPACE::PipelineCache;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineCache>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
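+  // Illustrative usage sketch (non-normative comment; `device` is a hypothetical vk::Device).
+  // The wrapper is a plain handle with VK_NULL_HANDLE semantics, so validity tests are direct:
+  //
+  //   vk::PipelineCache cache;          // default-constructs to VK_NULL_HANDLE
+  //   if ( !cache )
+  //   {
+  //     cache = device.createPipelineCache( vk::PipelineCacheCreateInfo{} );
+  //   }
+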
+  struct EventCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( EventCreateInfo ) );
+      return *this;
+    }
+
+    EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkEventCreateInfo*>( this );
+    }
+
+    operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkEventCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( EventCreateInfo const& ) const = default;
+#else
+    bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
+
+  };
+  static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eEventCreateInfo>
+  {
+    using Type = EventCreateInfo;
+  };
+
+  struct FenceCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FenceCreateInfo ) );
+      return *this;
+    }
+
+    FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceCreateInfo*>( this );
+    }
+
+    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FenceCreateInfo const& ) const = default;
+#else
+    bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
+
+  };
+  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFenceCreateInfo>
+  {
+    using Type = FenceCreateInfo;
+  };
+
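+  // Illustrative usage sketch (non-normative comment; `device` is a hypothetical vk::Device).
+  // A fence is commonly created pre-signaled so the first frame's wait does not stall:
+  //
+  //   vk::FenceCreateInfo fenceInfo( vk::FenceCreateFlagBits::eSignaled );
+  //   vk::Fence fence = device.createFence( fenceInfo );
+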
+  struct FramebufferCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), width( width_ ), height( height_ ), layers( layers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {} )
+    : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), width( width_ ), height( height_ ), layers( layers_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferCreateInfo ) );
+      return *this;
+    }
+
+    FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    FramebufferCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layers = layers_;
+      return *this;
+    }
+
+
+    operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
+    }
+
+    operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferCreateInfo const& ) const = default;
+#else
+    bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( renderPass == rhs.renderPass )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layers == rhs.layers );
+    }
+
+    bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t layers = {};
+
+  };
+  static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
+  {
+    using Type = FramebufferCreateInfo;
+  };
+
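+  // Illustrative usage sketch (non-normative comment; `imageView`, `renderPass` and `extent`
+  // are hypothetical, already-created objects). The ArrayProxyNoTemporaries overloads above
+  // derive attachmentCount/pAttachments from a named container (temporaries are rejected):
+  //
+  //   std::array<vk::ImageView, 1> attachments = { imageView };
+  //   vk::FramebufferCreateInfo framebufferInfo( {}, renderPass, attachments,
+  //                                              extent.width, extent.height, 1 );
+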
+  struct VertexInputBindingDescription
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT
+    : binding( binding_ ), stride( stride_ ), inputRate( inputRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
+      return *this;
+    }
+
+    VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VertexInputBindingDescription ) );
+      return *this;
+    }
+
+    VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputRate = inputRate_;
+      return *this;
+    }
+
+
+    operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
+    }
+
+    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VertexInputBindingDescription const& ) const = default;
+#else
+    bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( stride == rhs.stride )
+          && ( inputRate == rhs.inputRate );
+    }
+
+    bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t binding = {};
+    uint32_t stride = {};
+    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
+
+  };
+  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct VertexInputAttributeDescription
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT
+    : location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
+      return *this;
+    }
+
+    VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VertexInputAttributeDescription ) );
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
+    {
+      location = location_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+
+    operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
+    }
+
+    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VertexInputAttributeDescription const& ) const = default;
+#else
+    bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( location == rhs.location )
+          && ( binding == rhs.binding )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t location = {};
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint32_t offset = {};
+
+  };
+  static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineVertexInputStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ), pVertexBindingDescriptions( pVertexBindingDescriptions_ ), vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ), pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {} )
+    : flags( flags_ ), vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) ), pVertexBindingDescriptions( vertexBindingDescriptions_.data() ), vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) ), pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDescriptions = pVertexBindingDescriptions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
+      pVertexBindingDescriptions = vertexBindingDescriptions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
+      pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineVertexInputStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
+          && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
+          && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
+          && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+    }
+
+    bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
+    uint32_t vertexBindingDescriptionCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {};
+    uint32_t vertexAttributeDescriptionCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {};
+
+  };
+  static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
+  {
+    using Type = PipelineVertexInputStateCreateInfo;
+  };
+
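+  // Illustrative usage sketch (non-normative comment; `Vertex` is a hypothetical application
+  // vertex type). The two description structs above plug into the vertex input state:
+  //
+  //   vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );
+  //   vk::VertexInputAttributeDescription attribute( 0, 0, vk::Format::eR32G32B32Sfloat,
+  //                                                  offsetof( Vertex, position ) );
+  //   vk::PipelineVertexInputStateCreateInfo vertexInputState( {}, binding, attribute );
+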
+  struct PipelineInputAssemblyStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), topology( topology_ ), primitiveRestartEnable( primitiveRestartEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
+    {
+      topology = topology_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveRestartEnable = primitiveRestartEnable_;
+      return *this;
+    }
+
+
+    operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineInputAssemblyStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( topology == rhs.topology )
+          && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+    }
+
+    bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
+
+  };
+  static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
+  {
+    using Type = PipelineInputAssemblyStateCreateInfo;
+  };
+
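+  // Illustrative usage sketch (non-normative comment): topology defaults to ePointList and
+  // primitiveRestartEnable to VK_FALSE, so a typical triangle-list setup only overrides topology:
+  //
+  //   vk::PipelineInputAssemblyStateCreateInfo inputAssembly( {}, vk::PrimitiveTopology::eTriangleList,
+  //                                                           VK_FALSE );
+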
+  struct PipelineTessellationStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), patchControlPoints( patchControlPoints_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patchControlPoints = patchControlPoints_;
+      return *this;
+    }
+
+
+    operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineTessellationStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( patchControlPoints == rhs.patchControlPoints );
+    }
+
+    bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
+    uint32_t patchControlPoints = {};
+
+  };
+  static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
+  {
+    using Type = PipelineTessellationStateCreateInfo;
+  };
+
+  struct PipelineViewportStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), viewportCount( viewportCount_ ), pViewports( pViewports_ ), scissorCount( scissorCount_ ), pScissors( pScissors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {} )
+    : flags( flags_ ), viewportCount( static_cast<uint32_t>( viewports_.size() ) ), pViewports( viewports_.data() ), scissorCount( static_cast<uint32_t>( scissors_.size() ) ), pScissors( scissors_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewports = pViewports_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportStateCreateInfo & setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = static_cast<uint32_t>( viewports_.size() );
+      pViewports = viewports_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scissorCount = scissorCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pScissors = pScissors_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportStateCreateInfo & setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scissorCount = static_cast<uint32_t>( scissors_.size() );
+      pScissors = scissors_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewports == rhs.pViewports )
+          && ( scissorCount == rhs.scissorCount )
+          && ( pScissors == rhs.pScissors );
+    }
+
+    bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {};
+    uint32_t scissorCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {};
+
+  };
+  static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
+  {
+    using Type = PipelineViewportStateCreateInfo;
+  };
+
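+  // Illustrative usage sketch (non-normative comment; `extent` is a hypothetical vk::Extent2D,
+  // e.g. the swapchain extent). A single static viewport/scissor pair via the ArrayProxy overloads:
+  //
+  //   vk::Viewport viewport( 0.0f, 0.0f, static_cast<float>( extent.width ),
+  //                          static_cast<float>( extent.height ), 0.0f, 1.0f );
+  //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), extent );
+  //   vk::PipelineViewportStateCreateInfo viewportState( {}, viewport, scissor );
+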
+  struct PipelineRasterizationStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), depthClampEnable( depthClampEnable_ ), rasterizerDiscardEnable( rasterizerDiscardEnable_ ), polygonMode( polygonMode_ ), cullMode( cullMode_ ), frontFace( frontFace_ ), depthBiasEnable( depthBiasEnable_ ), depthBiasConstantFactor( depthBiasConstantFactor_ ), depthBiasClamp( depthBiasClamp_ ), depthBiasSlopeFactor( depthBiasSlopeFactor_ ), lineWidth( lineWidth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClampEnable = depthClampEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizerDiscardEnable = rasterizerDiscardEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      polygonMode = polygonMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cullMode = cullMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frontFace = frontFace_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasEnable = depthBiasEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasConstantFactor = depthBiasConstantFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasSlopeFactor = depthBiasSlopeFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineWidth = lineWidth_;
+      return *this;
+    }
+
+
+    operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClampEnable == rhs.depthClampEnable )
+          && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
+          && ( polygonMode == rhs.polygonMode )
+          && ( cullMode == rhs.cullMode )
+          && ( frontFace == rhs.frontFace )
+          && ( depthBiasEnable == rhs.depthBiasEnable )
+          && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
+          && ( lineWidth == rhs.lineWidth );
+    }
+
+    bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
+    VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
+    VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
+    VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
+    float depthBiasConstantFactor = {};
+    float depthBiasClamp = {};
+    float depthBiasSlopeFactor = {};
+    float lineWidth = {};
+
+  };
+  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
+  {
+    using Type = PipelineRasterizationStateCreateInfo;
+  };
+
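+  // Illustrative usage sketch (non-normative comment): all members zero-initialize, so lineWidth
+  // must be set explicitly (the Vulkan spec requires 1.0 unless the wideLines feature is enabled):
+  //
+  //   vk::PipelineRasterizationStateCreateInfo rasterization{};
+  //   rasterization.setPolygonMode( vk::PolygonMode::eFill )
+  //                .setCullMode( vk::CullModeFlagBits::eBack )
+  //                .setFrontFace( vk::FrontFace::eCounterClockwise )
+  //                .setLineWidth( 1.0f );
+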
+  struct PipelineMultisampleStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), rasterizationSamples( rasterizationSamples_ ), sampleShadingEnable( sampleShadingEnable_ ), minSampleShading( minSampleShading_ ), pSampleMask( pSampleMask_ ), alphaToCoverageEnable( alphaToCoverageEnable_ ), alphaToOneEnable( alphaToOneEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
     {
       rasterizationSamples = rasterizationSamples_;
       return *this;
     }
 
-    PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ )
+    PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       sampleShadingEnable = sampleShadingEnable_;
       return *this;
     }
 
-    PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ )
+    PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
     {
       minSampleShading = minSampleShading_;
       return *this;
     }
 
-    PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ )
+    PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
     {
       pSampleMask = pSampleMask_;
       return *this;
     }
 
-    PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ )
+    PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       alphaToCoverageEnable = alphaToCoverageEnable_;
       return *this;
     }
 
-    PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ )
+    PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       alphaToOneEnable = alphaToOneEnable_;
       return *this;
     }
 
-    operator VkPipelineMultisampleStateCreateInfo const&() const
+
+    operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>(this);
+      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
     }
 
-    operator VkPipelineMultisampleStateCreateInfo &()
+    operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>(this);
+      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
     }
 
-    bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineMultisampleStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -27290,194 +37252,867 @@
           && ( alphaToOneEnable == rhs.alphaToOneEnable );
     }
 
-    bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const
+    bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    PipelineMultisampleStateCreateFlags flags;
-    SampleCountFlagBits rasterizationSamples;
-    Bool32 sampleShadingEnable;
-    float minSampleShading;
-    const SampleMask* pSampleMask;
-    Bool32 alphaToCoverageEnable;
-    Bool32 alphaToOneEnable;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
+    float minSampleShading = {};
+    const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
+
   };
   static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct GraphicsPipelineCreateInfo
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
   {
-    GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(),
-                                uint32_t stageCount_ = 0,
-                                const PipelineShaderStageCreateInfo* pStages_ = nullptr,
-                                const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr,
-                                const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr,
-                                const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr,
-                                const PipelineViewportStateCreateInfo* pViewportState_ = nullptr,
-                                const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr,
-                                const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr,
-                                const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr,
-                                const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr,
-                                const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr,
-                                PipelineLayout layout_ = PipelineLayout(),
-                                RenderPass renderPass_ = RenderPass(),
-                                uint32_t subpass_ = 0,
-                                Pipeline basePipelineHandle_ = Pipeline(),
-                                int32_t basePipelineIndex_ = 0 )
-      : flags( flags_ )
-      , stageCount( stageCount_ )
-      , pStages( pStages_ )
-      , pVertexInputState( pVertexInputState_ )
-      , pInputAssemblyState( pInputAssemblyState_ )
-      , pTessellationState( pTessellationState_ )
-      , pViewportState( pViewportState_ )
-      , pRasterizationState( pRasterizationState_ )
-      , pMultisampleState( pMultisampleState_ )
-      , pDepthStencilState( pDepthStencilState_ )
-      , pColorBlendState( pColorBlendState_ )
-      , pDynamicState( pDynamicState_ )
-      , layout( layout_ )
-      , renderPass( renderPass_ )
-      , subpass( subpass_ )
-      , basePipelineHandle( basePipelineHandle_ )
-      , basePipelineIndex( basePipelineIndex_ )
-    {
-    }
+    using Type = PipelineMultisampleStateCreateInfo;
+  };
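For reference, a minimal usage sketch of the reworked wrapper above (not part of the patch; assumes `#include <vulkan/vulkan.hpp>`, the default `vk` namespace, and that struct constructors are not disabled — the function name is hypothetical). The setters return `*this` for chaining, and the conversion operators expose the same memory as the C struct:

#include <vulkan/vulkan.hpp>

void sketchMultisampleState()
{
  vk::PipelineMultisampleStateCreateInfo msaaState{};
  msaaState.setRasterizationSamples( vk::SampleCountFlagBits::e4 )
           .setSampleShadingEnable( VK_TRUE )
           .setMinSampleShading( 0.25f );

  // The wrapper is layout-compatible with VkPipelineMultisampleStateCreateInfo,
  // so the conversion below is just a reinterpreting view of the same object.
  const VkPipelineMultisampleStateCreateInfo & cView = msaaState;
  (void) cView;
}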
 
-    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) );
-    }
+  struct StencilOpState
+  {
 
-    GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs )
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT
+    : failOp( failOp_ ), passOp( passOp_ ), depthFailOp( depthFailOp_ ), compareOp( compareOp_ ), compareMask( compareMask_ ), writeMask( writeMask_ ), reference( reference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
       return *this;
     }
-    GraphicsPipelineCreateInfo& setPNext( const void* pNext_ )
+
+    StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( StencilOpState ) );
+      return *this;
+    }
+
+    StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      failOp = failOp_;
+      return *this;
+    }
+
+    StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      passOp = passOp_;
+      return *this;
+    }
+
+    StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthFailOp = depthFailOp_;
+      return *this;
+    }
+
+    StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareMask = compareMask_;
+      return *this;
+    }
+
+    StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      writeMask = writeMask_;
+      return *this;
+    }
+
+    StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reference = reference_;
+      return *this;
+    }
+
+
+    operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStencilOpState*>( this );
+    }
+
+    operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStencilOpState*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( StencilOpState const& ) const = default;
+#else
+    bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( failOp == rhs.failOp )
+          && ( passOp == rhs.passOp )
+          && ( depthFailOp == rhs.depthFailOp )
+          && ( compareOp == rhs.compareOp )
+          && ( compareMask == rhs.compareMask )
+          && ( writeMask == rhs.writeMask )
+          && ( reference == rhs.reference );
+    }
+
+    bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    uint32_t compareMask = {};
+    uint32_t writeMask = {};
+    uint32_t reference = {};
+
+  };
+  static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineDepthStencilStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), depthTestEnable( depthTestEnable_ ), depthWriteEnable( depthWriteEnable_ ), depthCompareOp( depthCompareOp_ ), depthBoundsTestEnable( depthBoundsTestEnable_ ), stencilTestEnable( stencilTestEnable_ ), front( front_ ), back( back_ ), minDepthBounds( minDepthBounds_ ), maxDepthBounds( maxDepthBounds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
+    PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ )
+    PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthTestEnable = depthTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthWriteEnable = depthWriteEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthCompareOp = depthCompareOp_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBoundsTestEnable = depthBoundsTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilTestEnable = stencilTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
+    {
+      front = front_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
+    {
+      back = back_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepthBounds = minDepthBounds_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepthBounds = maxDepthBounds_;
+      return *this;
+    }
+
+
+    operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineDepthStencilStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthTestEnable == rhs.depthTestEnable )
+          && ( depthWriteEnable == rhs.depthWriteEnable )
+          && ( depthCompareOp == rhs.depthCompareOp )
+          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
+          && ( stencilTestEnable == rhs.stencilTestEnable )
+          && ( front == rhs.front )
+          && ( back == rhs.back )
+          && ( minDepthBounds == rhs.minDepthBounds )
+          && ( maxDepthBounds == rhs.maxDepthBounds );
+    }
+
+    bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState front = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState back = {};
+    float minDepthBounds = {};
+    float maxDepthBounds = {};
+
+  };
+  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
+  {
+    using Type = PipelineDepthStencilStateCreateInfo;
+  };
+
+  struct PipelineColorBlendAttachmentState
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : blendEnable( blendEnable_ ), srcColorBlendFactor( srcColorBlendFactor_ ), dstColorBlendFactor( dstColorBlendFactor_ ), colorBlendOp( colorBlendOp_ ), srcAlphaBlendFactor( srcAlphaBlendFactor_ ), dstAlphaBlendFactor( dstAlphaBlendFactor_ ), alphaBlendOp( alphaBlendOp_ ), colorWriteMask( colorWriteMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineColorBlendAttachmentState ) );
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendEnable = blendEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcColorBlendFactor = srcColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstColorBlendFactor = dstColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorBlendOp = colorBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAlphaBlendFactor = srcAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAlphaBlendFactor = dstAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaBlendOp = alphaBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorWriteMask = colorWriteMask_;
+      return *this;
+    }
+
+
+    operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineColorBlendAttachmentState const& ) const = default;
+#else
+    bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( blendEnable == rhs.blendEnable )
+          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+          && ( colorBlendOp == rhs.colorBlendOp )
+          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+          && ( alphaBlendOp == rhs.alphaBlendOp )
+          && ( colorWriteMask == rhs.colorWriteMask );
+    }
+
+    bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
+    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
+
+  };
+  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineColorBlendStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {}, std::array<float,4> const& blendConstants_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), blendConstants( blendConstants_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, std::array<float,4> const& blendConstants_ = {} )
+    : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), blendConstants( blendConstants_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOpEnable = logicOpEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineColorBlendStateCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendConstants = blendConstants_;
+      return *this;
+    }
+
+
+    operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineColorBlendStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( logicOpEnable == rhs.logicOpEnable )
+          && ( logicOp == rhs.logicOp )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( blendConstants == rhs.blendConstants );
+    }
+
+    bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+    VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
+
+  };
+  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
+  {
+    using Type = PipelineColorBlendStateCreateInfo;
+  };
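A hedged sketch of the proxy-based setter introduced above (not part of the patch; assumes the default `vk` namespace and that VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined — the function name is hypothetical). setAttachments() derives attachmentCount and pAttachments from one named container, avoiding the count/pointer mismatch possible with the raw setters:

#include <array>
#include <vulkan/vulkan.hpp>

void sketchColorBlendState()
{
  vk::PipelineColorBlendAttachmentState attachment{};
  attachment.setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
                              | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );

  std::array<vk::PipelineColorBlendAttachmentState, 1> attachments = { attachment };

  vk::PipelineColorBlendStateCreateInfo blendState{};
  blendState.setAttachments( attachments )                      // fills attachmentCount and pAttachments together
            .setBlendConstants( { 0.0f, 0.0f, 0.0f, 0.0f } );
}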
+
+  struct PipelineDynamicStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), dynamicStateCount( dynamicStateCount_ ), pDynamicStates( pDynamicStates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
+    : flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = dynamicStateCount_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicStates = pDynamicStates_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineDynamicStateCreateInfo & setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
+      pDynamicStates = dynamicStates_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineDynamicStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dynamicStateCount == rhs.dynamicStateCount )
+          && ( pDynamicStates == rhs.pDynamicStates );
+    }
+
+    bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
+    uint32_t dynamicStateCount = {};
+    const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {};
+
+  };
+  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
+  {
+    using Type = PipelineDynamicStateCreateInfo;
+  };
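Likewise, a small sketch for the dynamic-state proxy setter above (same assumptions as the previous sketch; function name hypothetical). ArrayProxyNoTemporaries deliberately rejects temporaries, so the dynamic-state list must stay alive for as long as pDynamicStates is read:

#include <array>
#include <vulkan/vulkan.hpp>

void sketchDynamicState()
{
  // Must be a named lvalue; ArrayProxyNoTemporaries refuses rvalue containers.
  std::array<vk::DynamicState, 2> states = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };

  vk::PipelineDynamicStateCreateInfo dynamicState{};
  dynamicState.setDynamicStates( states );   // dynamicStateCount and pDynamicStates taken from 'states'
}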
+
+  struct GraphicsPipelineCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
+    : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GraphicsPipelineCreateInfo ) );
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
     {
       stageCount = stageCount_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ )
+    GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
     {
       pStages = pStages_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GraphicsPipelineCreateInfo & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
     {
       pVertexInputState = pVertexInputState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ )
+    GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
     {
       pInputAssemblyState = pInputAssemblyState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ )
+    GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
     {
       pTessellationState = pTessellationState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ )
+    GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
     {
       pViewportState = pViewportState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ )
+    GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
     {
       pRasterizationState = pRasterizationState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ )
+    GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
     {
       pMultisampleState = pMultisampleState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ )
+    GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
     {
       pDepthStencilState = pDepthStencilState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ )
+    GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
     {
       pColorBlendState = pColorBlendState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ )
+    GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
     {
       pDynamicState = pDynamicState_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ )
+    GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
     {
       layout = layout_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ )
+    GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
     {
       renderPass = renderPass_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ )
+    GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
     {
       subpass = subpass_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
+    GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
     {
       basePipelineHandle = basePipelineHandle_;
       return *this;
     }
 
-    GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
+    GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
     {
       basePipelineIndex = basePipelineIndex_;
       return *this;
     }
 
-    operator VkGraphicsPipelineCreateInfo const&() const
+
+    operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>(this);
+      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
     }
 
-    operator VkGraphicsPipelineCreateInfo &()
+    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>(this);
+      return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
     }
 
-    bool operator==( GraphicsPipelineCreateInfo const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GraphicsPipelineCreateInfo const& ) const = default;
+#else
+    bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -27500,49 +38135,14531 @@
           && ( basePipelineIndex == rhs.basePipelineIndex );
     }
 
-    bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const
+    bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    PipelineCreateFlags flags;
-    uint32_t stageCount;
-    const PipelineShaderStageCreateInfo* pStages;
-    const PipelineVertexInputStateCreateInfo* pVertexInputState;
-    const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
-    const PipelineTessellationStateCreateInfo* pTessellationState;
-    const PipelineViewportStateCreateInfo* pViewportState;
-    const PipelineRasterizationStateCreateInfo* pRasterizationState;
-    const PipelineMultisampleStateCreateInfo* pMultisampleState;
-    const PipelineDepthStencilStateCreateInfo* pDepthStencilState;
-    const PipelineColorBlendStateCreateInfo* pColorBlendState;
-    const PipelineDynamicStateCreateInfo* pDynamicState;
-    PipelineLayout layout;
-    RenderPass renderPass;
-    uint32_t subpass;
-    Pipeline basePipelineHandle;
-    int32_t basePipelineIndex;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
   };
   static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
+  {
+    using Type = GraphicsPipelineCreateInfo;
+  };
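For reference, a minimal sketch of assembling the create-info above from the per-stage state structs (not part of the patch; assumes the default `vk` namespace, with placeholder null handles instead of real device objects — the function name is hypothetical):

#include <vulkan/vulkan.hpp>

void sketchGraphicsPipelineInfo()
{
  vk::PipelineVertexInputStateCreateInfo   vertexInput{};
  vk::PipelineInputAssemblyStateCreateInfo inputAssembly{};
  inputAssembly.setTopology( vk::PrimitiveTopology::eTriangleList );

  vk::PipelineLayout pipelineLayout{};   // placeholder null handles; real ones come from the device
  vk::RenderPass     renderPass{};

  vk::GraphicsPipelineCreateInfo pipelineInfo{};
  pipelineInfo.setPVertexInputState( &vertexInput )
              .setPInputAssemblyState( &inputAssembly )
              .setLayout( pipelineLayout )
              .setRenderPass( renderPass )
              .setSubpass( 0 );
}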
+
+  struct ImageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), initialLayout( initialLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t mipLevels_, uint32_t arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
+    : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), initialLayout( initialLayout_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageCreateInfo ) );
+      return *this;
+    }
+
+    ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageType = imageType_;
+      return *this;
+    }
+
+    ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevels = mipLevels_;
+      return *this;
+    }
+
+    ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayers = arrayLayers_;
+      return *this;
+    }
+
+    ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+
+    operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCreateInfo*>( this );
+    }
+
+    operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( imageType == rhs.imageType )
+          && ( format == rhs.format )
+          && ( extent == rhs.extent )
+          && ( mipLevels == rhs.mipLevels )
+          && ( arrayLayers == rhs.arrayLayers )
+          && ( samples == rhs.samples )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( initialLayout == rhs.initialLayout );
+    }
+
+    bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    uint32_t mipLevels = {};
+    uint32_t arrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t* pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+
+  };
+  static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageCreateInfo>
+  {
+    using Type = ImageCreateInfo;
+  };
+
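+  // Illustrative usage sketch ("device", the format and extent values are placeholders,
+  // not defaults from the registry): the fluent setters above can describe a 2D sampled image.
+  //
+  //   vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo{}
+  //                                     .setImageType( vk::ImageType::e2D )
+  //                                     .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //                                     .setExtent( { 256, 256, 1 } )
+  //                                     .setMipLevels( 1 )
+  //                                     .setArrayLayers( 1 )
+  //                                     .setSamples( vk::SampleCountFlagBits::e1 )
+  //                                     .setTiling( vk::ImageTiling::eOptimal )
+  //                                     .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
+  //                                     .setInitialLayout( vk::ImageLayout::eUndefined );
+  //   vk::Image image = device.createImage( imageInfo );
+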
+  struct ImageViewCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), image( image_ ), viewType( viewType_ ), format( format_ ), components( components_ ), subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewCreateInfo ) );
+      return *this;
+    }
+
+    ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewType = viewType_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+
+    operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
+    }
+
+    operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( image == rhs.image )
+          && ( viewType == rhs.viewType )
+          && ( format == rhs.format )
+          && ( components == rhs.components )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+
+  };
+  static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewCreateInfo>
+  {
+    using Type = ImageViewCreateInfo;
+  };
+
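+  // Illustrative usage sketch ("image" and "device" are placeholder handles created
+  // elsewhere): a plain 2D color view over the first mip level and array layer.
+  //
+  //   vk::ImageViewCreateInfo viewInfo = vk::ImageViewCreateInfo{}
+  //                                        .setImage( image )
+  //                                        .setViewType( vk::ImageViewType::e2D )
+  //                                        .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //                                        .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
+  //   vk::ImageView view = device.createImageView( viewInfo );
+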
+  struct IndirectCommandsLayoutTokenNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ = {}, const uint32_t* pIndexTypeValues_ = {}) VULKAN_HPP_NOEXCEPT
+    : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( indexTypeCount_ ), pIndexTypes( pIndexTypes_ ), pIndexTypeValues( pIndexTypeValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {} )
+    : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) ), pIndexTypes( indexTypes_.data() ), pIndexTypeValues( indexTypeValues_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
+#else
+      if ( indexTypes_.size() != indexTypeValues_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const *>( &rhs );
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( IndirectCommandsLayoutTokenNV ) );
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stream = stream_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingUnit = vertexBindingUnit_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexDynamicStride = vertexDynamicStride_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantPipelineLayout = pushconstantPipelineLayout_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantOffset = pushconstantOffset_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushconstantSize = pushconstantSize_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectStateFlags = indirectStateFlags_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeCount = indexTypeCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pIndexTypes = pIndexTypes_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    IndirectCommandsLayoutTokenNV & setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
+      pIndexTypes = indexTypes_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t* pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pIndexTypeValues = pIndexTypeValues_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    IndirectCommandsLayoutTokenNV & setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeCount = static_cast<uint32_t>( indexTypeValues_.size() );
+      pIndexTypeValues = indexTypeValues_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkIndirectCommandsLayoutTokenNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV*>( this );
+    }
+
+    operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsLayoutTokenNV const& ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( tokenType == rhs.tokenType )
+          && ( stream == rhs.stream )
+          && ( offset == rhs.offset )
+          && ( vertexBindingUnit == rhs.vertexBindingUnit )
+          && ( vertexDynamicStride == rhs.vertexDynamicStride )
+          && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout )
+          && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags )
+          && ( pushconstantOffset == rhs.pushconstantOffset )
+          && ( pushconstantSize == rhs.pushconstantSize )
+          && ( indirectStateFlags == rhs.indirectStateFlags )
+          && ( indexTypeCount == rhs.indexTypeCount )
+          && ( pIndexTypes == rhs.pIndexTypes )
+          && ( pIndexTypeValues == rhs.pIndexTypeValues );
+    }
+
+    bool operator!=( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
+    uint32_t stream = {};
+    uint32_t offset = {};
+    uint32_t vertexBindingUnit = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
+    uint32_t pushconstantOffset = {};
+    uint32_t pushconstantSize = {};
+    VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
+    uint32_t indexTypeCount = {};
+    const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes = {};
+    const uint32_t* pIndexTypeValues = {};
+
+  };
+  static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
+  {
+    using Type = IndirectCommandsLayoutTokenNV;
+  };
+
+  struct IndirectCommandsLayoutCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t* pStreamStrides_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( tokenCount_ ), pTokens( pTokens_ ), streamCount( streamCount_ ), pStreamStrides( pStreamStrides_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {} )
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() ), streamCount( static_cast<uint32_t>( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( IndirectCommandsLayoutCreateInfoNV ) );
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenCount = tokenCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTokens = pTokens_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    IndirectCommandsLayoutCreateInfoNV & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenCount = static_cast<uint32_t>( tokens_.size() );
+      pTokens = tokens_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = streamCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t* pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStreamStrides = pStreamStrides_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    IndirectCommandsLayoutCreateInfoNV & setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamCount = static_cast<uint32_t>( streamStrides_.size() );
+      pStreamStrides = streamStrides_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkIndirectCommandsLayoutCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( this );
+    }
+
+    operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsLayoutCreateInfoNV const& ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( tokenCount == rhs.tokenCount )
+          && ( pTokens == rhs.pTokens )
+          && ( streamCount == rhs.streamCount )
+          && ( pStreamStrides == rhs.pStreamStrides );
+    }
+
+    bool operator!=( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t tokenCount = {};
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens = {};
+    uint32_t streamCount = {};
+    const uint32_t* pStreamStrides = {};
+
+  };
+  static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
+  {
+    using Type = IndirectCommandsLayoutCreateInfoNV;
+  };
+
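+  // Illustrative usage sketch ("tokens" and "streamStrides" are hypothetical arrays
+  // filled elsewhere): the ArrayProxy setters keep tokenCount/pTokens and
+  // streamCount/pStreamStrides consistent automatically.
+  //
+  //   auto layoutInfo = vk::IndirectCommandsLayoutCreateInfoNV{}
+  //                       .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
+  //                       .setTokens( tokens )
+  //                       .setStreamStrides( streamStrides );
+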
+  struct PipelineCacheCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
+    : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCacheCreateInfo ) );
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof(T);
+      pInitialData = initialData_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
+    }
+
+    operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCacheCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
+    size_t initialDataSize = {};
+    const void* pInitialData = {};
+
+  };
+  static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
+  {
+    using Type = PipelineCacheCreateInfo;
+  };
+
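+  // Illustrative usage sketch ("cacheBlob" is a hypothetical byte vector loaded from
+  // disk): setInitialData<T> derives initialDataSize from the element count, so the
+  // size and pointer cannot get out of sync.
+  //
+  //   std::vector<uint8_t> cacheBlob = ...;   // previously serialized cache data
+  //   vk::PipelineCacheCreateInfo cacheInfo = vk::PipelineCacheCreateInfo{}
+  //                                             .setInitialData<uint8_t>( cacheBlob );
+  //   vk::PipelineCache cache = device.createPipelineCache( cacheInfo );
+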
+  struct PushConstantRange
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PushConstantRange(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}) VULKAN_HPP_NOEXCEPT
+    : stageFlags( stageFlags_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
+      return *this;
+    }
+
+    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PushConstantRange ) );
+      return *this;
+    }
+
+    PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange*>( this );
+    }
+
+    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PushConstantRange const& ) const = default;
+#else
+    bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( stageFlags == rhs.stageFlags )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    uint32_t offset = {};
+    uint32_t size = {};
+
+  };
+  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineLayoutCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), setLayoutCount( setLayoutCount_ ), pSetLayouts( pSetLayouts_ ), pushConstantRangeCount( pushConstantRangeCount_ ), pPushConstantRanges( pPushConstantRanges_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {} )
+    : flags( flags_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineLayoutCreateInfo ) );
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLayoutCreateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts = setLayouts_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = pushConstantRangeCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPushConstantRanges = pPushConstantRanges_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLayoutCreateInfo & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
+      pPushConstantRanges = pushConstantRanges_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
+    }
+
+    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineLayoutCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( setLayoutCount == rhs.setLayoutCount )
+          && ( pSetLayouts == rhs.pSetLayouts )
+          && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+          && ( pPushConstantRanges == rhs.pPushConstantRanges );
+    }
+
+    bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
+    uint32_t setLayoutCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+    uint32_t pushConstantRangeCount = {};
+    const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {};
+
+  };
+  static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
+  {
+    using Type = PipelineLayoutCreateInfo;
+  };
+
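+  // Illustrative usage sketch ("setLayouts" is a hypothetical std::array of
+  // DescriptorSetLayout handles): one push-constant range for a 4x4 matrix in the
+  // vertex stage, combined with the set layouts via the ArrayProxy setters.
+  //
+  //   vk::PushConstantRange pcRange{ vk::ShaderStageFlagBits::eVertex, 0, 16 * sizeof( float ) };
+  //   vk::PipelineLayoutCreateInfo layoutInfo = vk::PipelineLayoutCreateInfo{}
+  //                                               .setSetLayouts( setLayouts )
+  //                                               .setPushConstantRanges( pcRange );
+  //   vk::PipelineLayout pipelineLayout = device.createPipelineLayout( layoutInfo );
+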
+  struct PrivateDataSlotCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PrivateDataSlotCreateInfoEXT & operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PrivateDataSlotCreateInfoEXT & operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PrivateDataSlotCreateInfoEXT ) );
+      return *this;
+    }
+
+    PrivateDataSlotCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PrivateDataSlotCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkPrivateDataSlotCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( this );
+    }
+
+    operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPrivateDataSlotCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PrivateDataSlotCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {};
+
+  };
+  static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PrivateDataSlotCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfoEXT>
+  {
+    using Type = PrivateDataSlotCreateInfoEXT;
+  };
+
+  class PrivateDataSlotEXT
+  {
+  public:
+    using CType = VkPrivateDataSlotEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT() VULKAN_HPP_NOEXCEPT
+      : m_privateDataSlotEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_privateDataSlotEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT
+      : m_privateDataSlotEXT( privateDataSlotEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PrivateDataSlotEXT & operator=(VkPrivateDataSlotEXT privateDataSlotEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_privateDataSlotEXT = privateDataSlotEXT;
+      return *this;
+    }
+#endif
+
+    PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_privateDataSlotEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PrivateDataSlotEXT const& ) const = default;
+#else
+    bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT;
+    }
+
+    bool operator!=(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT;
+    }
+
+    bool operator<(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlotEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlotEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlotEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPrivateDataSlotEXT m_privateDataSlotEXT;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePrivateDataSlotEXT>
+  {
+    using type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct QueryPoolCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queryType( queryType_ ), queryCount( queryCount_ ), pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( QueryPoolCreateInfo ) );
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryType = queryType_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryCount = queryCount_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+
+    operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
+    }
+
+    operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPoolCreateInfo const& ) const = default;
+#else
+    bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queryType == rhs.queryType )
+          && ( queryCount == rhs.queryCount )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
+    uint32_t queryCount = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+
+  };
+  static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
+  {
+    using Type = QueryPoolCreateInfo;
+  };
+
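+  // Illustrative usage sketch: a pool of 64 occlusion queries; "device" is a
+  // placeholder handle as in the examples above.
+  //
+  //   vk::QueryPoolCreateInfo queryPoolInfo = vk::QueryPoolCreateInfo{}
+  //                                             .setQueryType( vk::QueryType::eOcclusion )
+  //                                             .setQueryCount( 64 );
+  //   vk::QueryPool queryPool = device.createQueryPool( queryPoolInfo );
+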
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct RayTracingShaderGroupCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void* pShaderGroupCaptureReplayHandle_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ ), pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingShaderGroupCreateInfoKHR ) );
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      generalShader = generalShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      closestHitShader = closestHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anyHitShader = anyHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      intersectionShader = intersectionShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void* pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
+      return *this;
+    }
+
+
+    operator VkRayTracingShaderGroupCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR*>( this );
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RayTracingShaderGroupCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( generalShader == rhs.generalShader )
+          && ( closestHitShader == rhs.closestHitShader )
+          && ( anyHitShader == rhs.anyHitShader )
+          && ( intersectionShader == rhs.intersectionShader )
+          && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
+    }
+
+    bool operator!=( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+    uint32_t generalShader = {};
+    uint32_t closestHitShader = {};
+    uint32_t anyHitShader = {};
+    uint32_t intersectionShader = {};
+    const void* pShaderGroupCaptureReplayHandle = {};
+
+  };
+  static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
+  {
+    using Type = RayTracingShaderGroupCreateInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct PipelineLibraryCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ = {}) VULKAN_HPP_NOEXCEPT
+    : libraryCount( libraryCount_ ), pLibraries( pLibraries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
+    : libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineLibraryCreateInfoKHR ) );
+      return *this;
+    }
+
+    PipelineLibraryCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      libraryCount = libraryCount_;
+      return *this;
+    }
+
+    PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLibraries = pLibraries_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineLibraryCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      libraryCount = static_cast<uint32_t>( libraries_.size() );
+      pLibraries = libraries_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineLibraryCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR*>( this );
+    }
+
+    operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineLibraryCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( libraryCount == rhs.libraryCount )
+          && ( pLibraries == rhs.pLibraries );
+    }
+
+    bool operator!=( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
+    const void* pNext = {};
+    uint32_t libraryCount = {};
+    const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries = {};
+
+  };
+  static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
+  {
+    using Type = PipelineLibraryCreateInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct RayTracingPipelineInterfaceCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPayloadSize_ = {}, uint32_t maxAttributeSize_ = {}, uint32_t maxCallableSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxPayloadSize( maxPayloadSize_ ), maxAttributeSize( maxAttributeSize_ ), maxCallableSize( maxCallableSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) );
+      return *this;
+    }
+
+    RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingPipelineInterfaceCreateInfoKHR & setMaxPayloadSize( uint32_t maxPayloadSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPayloadSize = maxPayloadSize_;
+      return *this;
+    }
+
+    RayTracingPipelineInterfaceCreateInfoKHR & setMaxAttributeSize( uint32_t maxAttributeSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxAttributeSize = maxAttributeSize_;
+      return *this;
+    }
+
+    RayTracingPipelineInterfaceCreateInfoKHR & setMaxCallableSize( uint32_t maxCallableSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxCallableSize = maxCallableSize_;
+      return *this;
+    }
+
+
+    operator VkRayTracingPipelineInterfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPayloadSize == rhs.maxPayloadSize )
+          && ( maxAttributeSize == rhs.maxAttributeSize )
+          && ( maxCallableSize == rhs.maxCallableSize );
+    }
+
+    bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+    const void* pNext = {};
+    uint32_t maxPayloadSize = {};
+    uint32_t maxAttributeSize = {};
+    uint32_t maxCallableSize = {};
+
+  };
+  static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingPipelineInterfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
+  {
+    using Type = RayTracingPipelineInterfaceCreateInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct RayTracingPipelineCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), libraries( libraries_ ), pLibraryInterface( pLibraryInterface_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
+    : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), libraries( libraries_ ), pLibraryInterface( pLibraryInterface_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingPipelineCreateInfoKHR ) );
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RayTracingPipelineCreateInfoKHR & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RayTracingPipelineCreateInfoKHR & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = static_cast<uint32_t>( groups_.size() );
+      pGroups = groups_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RayTracingPipelineCreateInfoKHR & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxRecursionDepth = maxRecursionDepth_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & libraries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      libraries = libraries_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLibraryInterface = pLibraryInterface_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+
+    operator VkRayTracingPipelineCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( this );
+    }
+
+    operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RayTracingPipelineCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( libraries == rhs.libraries )
+          && ( pLibraryInterface == rhs.pLibraryInterface )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups = {};
+    uint32_t maxRecursionDepth = {};
+    VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries = {};
+    const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
+  };
+  static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
+  {
+    using Type = RayTracingPipelineCreateInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct RayTracingShaderGroupCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingShaderGroupCreateInfoNV ) );
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      generalShader = generalShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      closestHitShader = closestHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anyHitShader = anyHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      intersectionShader = intersectionShader_;
+      return *this;
+    }
+
+
+    operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RayTracingShaderGroupCreateInfoNV const& ) const = default;
+#else
+    bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( generalShader == rhs.generalShader )
+          && ( closestHitShader == rhs.closestHitShader )
+          && ( anyHitShader == rhs.anyHitShader )
+          && ( intersectionShader == rhs.intersectionShader );
+    }
+
+    bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+    uint32_t generalShader = {};
+    uint32_t closestHitShader = {};
+    uint32_t anyHitShader = {};
+    uint32_t intersectionShader = {};
+
+  };
+  static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
+  {
+    using Type = RayTracingShaderGroupCreateInfoNV;
+  };
+
+  struct RayTracingPipelineCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
+    : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RayTracingPipelineCreateInfoNV ) );
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RayTracingPipelineCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RayTracingPipelineCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = static_cast<uint32_t>( groups_.size() );
+      pGroups = groups_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxRecursionDepth = maxRecursionDepth_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+
+    operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RayTracingPipelineCreateInfoNV const& ) const = default;
+#else
+    bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {};
+    uint32_t maxRecursionDepth = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+
+  };
+  static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
+  {
+    using Type = RayTracingPipelineCreateInfoNV;
+  };
+
+  struct SubpassDescription
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
+#else
+      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDescription ) );
+      return *this;
+    }
+
+    SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
+      pInputAttachments = inputAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+      pColorAttachments = colorAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
+      pResolveAttachments = resolveAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
+      pPreserveAttachments = preserveAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription*>( this );
+    }
+
+    operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassDescription const& ) const = default;
+#else
+    bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+    }
+
+    bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t inputAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {};
+    uint32_t preserveAttachmentCount = {};
+    const uint32_t* pPreserveAttachments = {};
+
+  };
+  static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassDependency
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDependency ) );
+      return *this;
+    }
+
+    SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+
+    operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency*>( this );
+    }
+
+    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassDependency const& ) const = default;
+#else
+    bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags );
+    }
+
+    bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t srcSubpass = {};
+    uint32_t dstSubpass = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+
+  };
+  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {} )
+    : flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassCreateInfo ) );
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( subpasses_.size() );
+      pSubpasses = subpasses_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+      pDependencies = dependencies_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
+    }
+
+    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassCreateInfo const& ) const = default;
+#else
+    bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies );
+    }
+
+    bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {};
+    uint32_t subpassCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {};
+    uint32_t dependencyCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {};
+
+  };
+  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
+  {
+    using Type = RenderPassCreateInfo;
+  };
+
+  struct SubpassDescription2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
+    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
+#else
+      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDescription2 ) );
+      return *this;
+    }
+
+    SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription2 & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
+      pInputAttachments = inputAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription2 & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+      pColorAttachments = colorAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription2 & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
+      pResolveAttachments = resolveAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubpassDescription2 & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
+      pPreserveAttachments = preserveAttachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription2*>( this );
+    }
+
+    operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassDescription2 const& ) const = default;
+#else
+    bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( viewMask == rhs.viewMask )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+    }
+
+    bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t viewMask = {};
+    uint32_t inputAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {};
+    uint32_t preserveAttachmentCount = {};
+    const uint32_t* pPreserveAttachments = {};
+
+  };
+  static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDescription2>
+  {
+    using Type = SubpassDescription2;
+  };
+  using SubpassDescription2KHR = SubpassDescription2;
+
+  struct SubpassDependency2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ ), viewOffset( viewOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDependency2 ) );
+      return *this;
+    }
+
+    SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewOffset = viewOffset_;
+      return *this;
+    }
+
+
+    operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency2*>( this );
+    }
+
+    operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassDependency2 const& ) const = default;
+#else
+    bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags )
+          && ( viewOffset == rhs.viewOffset );
+    }
+
+    bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
+    const void* pNext = {};
+    uint32_t srcSubpass = {};
+    uint32_t dstSubpass = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+    int32_t viewOffset = {};
+
+  };
+  static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDependency2>
+  {
+    using Type = SubpassDependency2;
+  };
+  using SubpassDependency2KHR = SubpassDependency2;
+
+  struct RenderPassCreateInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t* pCorrelatedViewMasks_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ ), correlatedViewMaskCount( correlatedViewMaskCount_ ), pCorrelatedViewMasks( pCorrelatedViewMasks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {} )
+    : flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() ), correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) ), pCorrelatedViewMasks( correlatedViewMasks_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassCreateInfo2 ) );
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo2 & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo2 & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( subpasses_.size() );
+      pSubpasses = subpasses_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo2 & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+      pDependencies = dependencies_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlatedViewMaskCount = correlatedViewMaskCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelatedViewMasks = pCorrelatedViewMasks_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassCreateInfo2 & setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
+      pCorrelatedViewMasks = correlatedViewMasks_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
+    }
+
+    operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassCreateInfo2 const& ) const = default;
+#else
+    bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies )
+          && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
+          && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
+    }
+
+    bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {};
+    uint32_t subpassCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {};
+    uint32_t dependencyCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {};
+    uint32_t correlatedViewMaskCount = {};
+    const uint32_t* pCorrelatedViewMasks = {};
+
+  };
+  static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
+  {
+    using Type = RenderPassCreateInfo2;
+  };
+  using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
+
+  struct SamplerCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), magFilter( magFilter_ ), minFilter( minFilter_ ), mipmapMode( mipmapMode_ ), addressModeU( addressModeU_ ), addressModeV( addressModeV_ ), addressModeW( addressModeW_ ), mipLodBias( mipLodBias_ ), anisotropyEnable( anisotropyEnable_ ), maxAnisotropy( maxAnisotropy_ ), compareEnable( compareEnable_ ), compareOp( compareOp_ ), minLod( minLod_ ), maxLod( maxLod_ ), borderColor( borderColor_ ), unnormalizedCoordinates( unnormalizedCoordinates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerCreateInfo ) );
+      return *this;
+    }
+
+    SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      magFilter = magFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minFilter = minFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipmapMode = mipmapMode_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeU = addressModeU_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeV = addressModeV_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeW = addressModeW_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLodBias = mipLodBias_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anisotropyEnable = anisotropyEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxAnisotropy = maxAnisotropy_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareEnable = compareEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLod = maxLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColor = borderColor_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      unnormalizedCoordinates = unnormalizedCoordinates_;
+      return *this;
+    }
+
+
+    operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
+    }
+
+    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerCreateInfo const& ) const = default;
+#else
+    bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( magFilter == rhs.magFilter )
+          && ( minFilter == rhs.minFilter )
+          && ( mipmapMode == rhs.mipmapMode )
+          && ( addressModeU == rhs.addressModeU )
+          && ( addressModeV == rhs.addressModeV )
+          && ( addressModeW == rhs.addressModeW )
+          && ( mipLodBias == rhs.mipLodBias )
+          && ( anisotropyEnable == rhs.anisotropyEnable )
+          && ( maxAnisotropy == rhs.maxAnisotropy )
+          && ( compareEnable == rhs.compareEnable )
+          && ( compareOp == rhs.compareOp )
+          && ( minLod == rhs.minLod )
+          && ( maxLod == rhs.maxLod )
+          && ( borderColor == rhs.borderColor )
+          && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+    }
+
+    bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    float mipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+    float maxAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    float minLod = {};
+    float maxLod = {};
+    VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+    VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
+
+  };
+  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCreateInfo>
+  {
+    using Type = SamplerCreateInfo;
+  };
+
+  struct SamplerYcbcrConversionCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}) VULKAN_HPP_NOEXCEPT
+    : format( format_ ), ycbcrModel( ycbcrModel_ ), ycbcrRange( ycbcrRange_ ), components( components_ ), xChromaOffset( xChromaOffset_ ), yChromaOffset( yChromaOffset_ ), chromaFilter( chromaFilter_ ), forceExplicitReconstruction( forceExplicitReconstruction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrModel = ycbcrModel_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrRange = ycbcrRange_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xChromaOffset = xChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      yChromaOffset = yChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaFilter = chromaFilter_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+    {
+      forceExplicitReconstruction = forceExplicitReconstruction_;
+      return *this;
+    }
+
+
+    operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversionCreateInfo const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( ycbcrModel == rhs.ycbcrModel )
+          && ( ycbcrRange == rhs.ycbcrRange )
+          && ( components == rhs.components )
+          && ( xChromaOffset == rhs.xChromaOffset )
+          && ( yChromaOffset == rhs.yChromaOffset )
+          && ( chromaFilter == rhs.chromaFilter )
+          && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+    }
+
+    bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+
+  };
+  static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
+  {
+    using Type = SamplerYcbcrConversionCreateInfo;
+  };
+  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
+
+  class SamplerYcbcrConversion
+  {
+  public:
+    using CType = VkSamplerYcbcrConversion;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion( samplerYcbcrConversion )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = samplerYcbcrConversion;
+      return *this;
+    }
+#endif
+
+    SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversion const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSamplerYcbcrConversion m_samplerYcbcrConversion;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSamplerYcbcrConversion>
+  {
+    using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
+
+  struct SemaphoreCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreCreateInfo ) );
+      return *this;
+    }
+
+    SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
+    }
+
+    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SemaphoreCreateInfo const& ) const = default;
+#else
+    bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
+
+  };
+  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
+  {
+    using Type = SemaphoreCreateInfo;
+  };
+
+  struct ShaderModuleCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t* pCode_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), codeSize( codeSize_ ), pCode( pCode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ )
+    : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderModuleCreateInfo ) );
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = codeSize_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCode = pCode_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = code_.size() * 4;
+      pCode = code_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
+    }
+
+    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModuleCreateInfo const& ) const = default;
+#else
+    bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( codeSize == rhs.codeSize )
+          && ( pCode == rhs.pCode );
+    }
+
+    bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
+    size_t codeSize = {};
+    const uint32_t* pCode = {};
+
+  };
+  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
+  {
+    using Type = ShaderModuleCreateInfo;
+  };
+
+  class SurfaceKHR
+  {
+  public:
+    using CType = VkSurfaceKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT
+      : m_surfaceKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_surfaceKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
+      : m_surfaceKHR( surfaceKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = surfaceKHR;
+      return *this;
+    }
+#endif
+
+    SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceKHR const& ) const = default;
+#else
+    bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == rhs.m_surfaceKHR;
+    }
+
+    bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != rhs.m_surfaceKHR;
+    }
+
+    bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR < rhs.m_surfaceKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSurfaceKHR m_surfaceKHR;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSurfaceKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct SwapchainCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, uint32_t minImageCount_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, uint32_t imageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, uint32_t minImageCount_, VULKAN_HPP_NAMESPACE::Format imageFormat_, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, uint32_t imageArrayLayers_, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} )
+    : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SwapchainCreateInfoKHR ) );
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SwapchainCreateInfoKHR & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+
+
+    operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( surface == rhs.surface )
+          && ( minImageCount == rhs.minImageCount )
+          && ( imageFormat == rhs.imageFormat )
+          && ( imageColorSpace == rhs.imageColorSpace )
+          && ( imageExtent == rhs.imageExtent )
+          && ( imageArrayLayers == rhs.imageArrayLayers )
+          && ( imageUsage == rhs.imageUsage )
+          && ( imageSharingMode == rhs.imageSharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( preTransform == rhs.preTransform )
+          && ( compositeAlpha == rhs.compositeAlpha )
+          && ( presentMode == rhs.presentMode )
+          && ( clipped == rhs.clipped )
+          && ( oldSwapchain == rhs.oldSwapchain );
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+    uint32_t minImageCount = {};
+    VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+    uint32_t imageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t* pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
+    VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+    VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
+
+  };
+  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
+  {
+    using Type = SwapchainCreateInfoKHR;
+  };
+
+  struct ValidationCacheCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
+    : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ValidationCacheCreateInfoEXT ) );
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof(T);
+      pInitialData = initialData_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationCacheCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
+    size_t initialDataSize = {};
+    const void* pInitialData = {};
+
+  };
+  static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
+  {
+    using Type = ValidationCacheCreateInfoEXT;
+  };
+
+  class ValidationCacheEXT
+  {
+  public:
+    using CType = VkValidationCacheEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT( validationCacheEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = validationCacheEXT;
+      return *this;
+    }
+#endif
+
+    ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationCacheEXT const& ) const = default;
+#else
+    bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == rhs.m_validationCacheEXT;
+    }
+
+    bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != rhs.m_validationCacheEXT;
+    }
+
+    bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT < rhs.m_validationCacheEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkValidationCacheEXT m_validationCacheEXT;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eValidationCacheEXT>
+  {
+    using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DisplayPowerInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff) VULKAN_HPP_NOEXCEPT
+    : powerState( powerState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPowerInfoEXT ) );
+      return *this;
+    }
+
+    DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      powerState = powerState_;
+      return *this;
+    }
+
+
+    operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
+    }
+
+    operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPowerInfoEXT const& ) const = default;
+#else
+    bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( powerState == rhs.powerState );
+    }
+
+    bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
+
+  };
+  static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
+  {
+    using Type = DisplayPowerInfoEXT;
+  };
+
+  struct MappedMemoryRange
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : memory( memory_ ), offset( offset_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
+      return *this;
+    }
+
+    MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MappedMemoryRange ) );
+      return *this;
+    }
+
+    MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+
+    operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMappedMemoryRange*>( this );
+    }
+
+    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMappedMemoryRange*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MappedMemoryRange const& ) const = default;
+#else
+    bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+  static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMappedMemoryRange>
+  {
+    using Type = MappedMemoryRange;
+  };
+
+  struct MemoryRequirements
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : size( size_ ), alignment( alignment_ ), memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryRequirements ) );
+      return *this;
+    }
+
+
+    operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements*>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryRequirements const& ) const = default;
+#else
+    bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( size == rhs.size )
+          && ( alignment == rhs.alignment )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryRequirements2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2(VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryRequirements2 ) );
+      return *this;
+    }
+
+
+    operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2*>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryRequirements2 const& ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+
+  };
+  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryRequirements2>
+  {
+    using Type = MemoryRequirements2;
+  };
+  using MemoryRequirements2KHR = MemoryRequirements2;
+
+  struct DeviceGroupPresentCapabilitiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
+    : presentMask( presentMask_ ), modes( modes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupPresentCapabilitiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( presentMask == rhs.presentMask )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+
+  };
+  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
+  {
+    using Type = DeviceGroupPresentCapabilitiesKHR;
+  };
+
+  struct PhysicalDeviceSurfaceInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}) VULKAN_HPP_NOEXCEPT
+    : surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
+      return *this;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+    }
+
+    operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surface == rhs.surface );
+    }
+
+    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
+  {
+    using Type = PhysicalDeviceSurfaceInfo2KHR;
+  };
+
+  struct DeviceMemoryOpaqueCaptureAddressInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
+    : memory( memory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
+      return *this;
+    }
+
+    DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) );
+      return *this;
+    }
+
+    DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+
+    operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+    }
+
+    operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const& ) const = default;
+#else
+    bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory );
+    }
+
+    bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+
+  };
+  static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
+  {
+    using Type = DeviceMemoryOpaqueCaptureAddressInfo;
+  };
+  using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
+
+  struct PresentInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {}, const uint32_t* pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result* pResults_ = {}) VULKAN_HPP_NOEXCEPT
+    : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), swapchainCount( swapchainCount_ ), pSwapchains( pSwapchains_ ), pImageIndices( pImageIndices_ ), pResults( pResults_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {} )
+    : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), swapchainCount( static_cast<uint32_t>( swapchains_.size() ) ), pSwapchains( swapchains_.data() ), pImageIndices( imageIndices_.data() ), pResults( results_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
+      VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
+      VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
+#else
+      if ( swapchains_.size() != imageIndices_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
+      }
+      if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
+      }
+      if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentInfoKHR ) );
+      return *this;
+    }
+
+    PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentInfoKHR & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+      pWaitSemaphores = waitSemaphores_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSwapchains = pSwapchains_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentInfoKHR & setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( swapchains_.size() );
+      pSwapchains = swapchains_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageIndices = pImageIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
+      pImageIndices = imageIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResults = pResults_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( results_.size() );
+      pResults = results_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentInfoKHR*>( this );
+    }
+
+    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentInfoKHR const& ) const = default;
+#else
+    bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pSwapchains == rhs.pSwapchains )
+          && ( pImageIndices == rhs.pImageIndices )
+          && ( pResults == rhs.pResults );
+    }
+
+    bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
+    const void* pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {};
+    const uint32_t* pImageIndices = {};
+    VULKAN_HPP_NAMESPACE::Result* pResults = {};
+
+  };
+  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePresentInfoKHR>
+  {
+    using Type = PresentInfoKHR;
+  };
+
+  struct SubmitInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
+    : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), pWaitDstStageMask( pWaitDstStageMask_ ), commandBufferCount( commandBufferCount_ ), pCommandBuffers( pCommandBuffers_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
+    : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), pWaitDstStageMask( waitDstStageMask_.data() ), commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) ), pCommandBuffers( commandBuffers_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
+#else
+      if ( waitSemaphores_.size() != waitDstStageMask_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+    SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubmitInfo ) );
+      return *this;
+    }
+
+    SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
+      pWaitSemaphores = waitSemaphores_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitDstStageMask = pWaitDstStageMask_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
+      pWaitDstStageMask = waitDstStageMask_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBuffers = pCommandBuffers_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo & setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
+      pCommandBuffers = commandBuffers_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SubmitInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
+      pSignalSemaphores = signalSemaphores_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubmitInfo*>( this );
+    }
+
+    operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubmitInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubmitInfo const& ) const = default;
+#else
+    bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBuffers == rhs.pCommandBuffers )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+    }
+
+    bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
+    const void* pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+    const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {};
+    uint32_t commandBufferCount = {};
+    const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {};
+    uint32_t signalSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
+
+  };
+  static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubmitInfo>
+  {
+    using Type = SubmitInfo;
+  };
+
+  class Queue
+  {
+  public:
+    using CType = VkQueue;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
+      : m_queue( queue )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = queue;
+      return *this;
+    }
+#endif
+
+    Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Queue const& ) const = default;
+#else
+    bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == rhs.m_queue;
+    }
+
+    bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != rhs.m_queue;
+    }
+
+    bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue < rhs.m_queue;
+    }
+#endif
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = CheckpointDataNVAllocator, typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueue m_queue;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueue>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DeviceQueueInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueIndex( queueIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceQueueInfo2 ) );
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueIndex = queueIndex_;
+      return *this;
+    }
+
+
+    operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
+    }
+
+    operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueInfo2 const& ) const = default;
+#else
+    bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueIndex == rhs.queueIndex );
+    }
+
+    bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueIndex = {};
+
+  };
+  static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
+  {
+    using Type = DeviceQueueInfo2;
+  };
+
+  struct FenceGetFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : fence( fence_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FenceGetFdInfoKHR ) );
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
+    }
+
+    operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FenceGetFdInfoKHR const& ) const = default;
+#else
+    bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
+  {
+    using Type = FenceGetFdInfoKHR;
+  };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct FenceGetWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : fence( fence_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FenceGetWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
+  {
+    using Type = FenceGetWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct GeneratedCommandsMemoryRequirementsInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), maxSequencesCount( maxSequencesCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) );
+      return *this;
+    }
+
+    GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+
+    operator VkGeneratedCommandsMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
+    }
+
+    operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const& ) const = default;
+#else
+    bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipeline == rhs.pipeline )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( maxSequencesCount == rhs.maxSequencesCount );
+    }
+
+    bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+    uint32_t maxSequencesCount = {};
+
+  };
+  static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeneratedCommandsMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
+  {
+    using Type = GeneratedCommandsMemoryRequirementsInfoNV;
+  };
+
+  struct ImageDrmFormatModifierPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifier( drmFormatModifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageDrmFormatModifierPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageDrmFormatModifierPropertiesEXT const& ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier );
+    }
+
+    bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+    void* pNext = {};
+    uint64_t drmFormatModifier = {};
+
+  };
+  static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
+  {
+    using Type = ImageDrmFormatModifierPropertiesEXT;
+  };
+
+  struct ImageMemoryRequirementsInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageMemoryRequirementsInfo2 ) );
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+
+    operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageMemoryRequirementsInfo2 const& ) const = default;
+#else
+    bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+
+  };
+  static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
+  {
+    using Type = ImageMemoryRequirementsInfo2;
+  };
+  using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
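+
+  // Usage sketch: an ImageMemoryRequirementsInfo2 wraps the image to be queried and is passed to
+  // the enhanced-mode Device::getImageMemoryRequirements2 wrapper, which returns a
+  // MemoryRequirements2 by value. Assumes the default `vk` namespace; `device` and `image` are
+  // placeholder names, not part of this header.
+  //
+  //   vk::MemoryRequirements2 reqs =
+  //     device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );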
+
+  struct SparseImageFormatProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectMask( aspectMask_ ), imageGranularity( imageGranularity_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageFormatProperties ) );
+      return *this;
+    }
+
+
+    operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
+    }
+
+    operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseImageFormatProperties const& ) const = default;
+#else
+    bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( imageGranularity == rhs.imageGranularity )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
+    VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
+
+  };
+  static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageMemoryRequirements
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT
+    : formatProperties( formatProperties_ ), imageMipTailFirstLod( imageMipTailFirstLod_ ), imageMipTailSize( imageMipTailSize_ ), imageMipTailOffset( imageMipTailOffset_ ), imageMipTailStride( imageMipTailStride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryRequirements ) );
+      return *this;
+    }
+
+
+    operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseImageMemoryRequirements const& ) const = default;
+#else
+    bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( formatProperties == rhs.formatProperties )
+          && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
+          && ( imageMipTailSize == rhs.imageMipTailSize )
+          && ( imageMipTailOffset == rhs.imageMipTailOffset )
+          && ( imageMipTailStride == rhs.imageMipTailStride );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
+    uint32_t imageMipTailFirstLod = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
+
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSparseMemoryRequirementsInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) );
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+
+    operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSparseMemoryRequirementsInfo2 const& ) const = default;
+#else
+    bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+
+  };
+  static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
+  {
+    using Type = ImageSparseMemoryRequirementsInfo2;
+  };
+  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+
+  struct SparseImageMemoryRequirements2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageMemoryRequirements2 ) );
+      return *this;
+    }
+
+
+    operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseImageMemoryRequirements2 const& ) const = default;
+#else
+    bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
+
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
+  {
+    using Type = SparseImageMemoryRequirements2;
+  };
+  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
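+
+  // Usage sketch: the enhanced-mode Device::getImageSparseMemoryRequirements2 wrapper takes an
+  // ImageSparseMemoryRequirementsInfo2 and returns the per-aspect sparse requirements as a
+  // std::vector of SparseImageMemoryRequirements2. Assumes the default `vk` namespace; `device`
+  // and `image` are placeholder names, not part of this header.
+  //
+  //   std::vector<vk::SparseImageMemoryRequirements2> sparseReqs =
+  //     device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( image ) );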
+
+  struct SubresourceLayout
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT
+    : offset( offset_ ), size( size_ ), rowPitch( rowPitch_ ), arrayPitch( arrayPitch_ ), depthPitch( depthPitch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
+      return *this;
+    }
+
+    SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubresourceLayout ) );
+      return *this;
+    }
+
+
+    operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubresourceLayout*>( this );
+    }
+
+    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubresourceLayout*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubresourceLayout const& ) const = default;
+#else
+    bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( size == rhs.size )
+          && ( rowPitch == rhs.rowPitch )
+          && ( arrayPitch == rhs.arrayPitch )
+          && ( depthPitch == rhs.depthPitch );
+    }
+
+    bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
+
+  };
+  static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageViewAddressPropertiesNVX
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceAddress( deviceAddress_ ), size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewAddressPropertiesNVX ) );
+      return *this;
+    }
+
+
+    operator VkImageViewAddressPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewAddressPropertiesNVX*>( this );
+    }
+
+    operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewAddressPropertiesNVX*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewAddressPropertiesNVX const& ) const = default;
+#else
+    bool operator==( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceAddress == rhs.deviceAddress )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+
+  };
+  static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
+  {
+    using Type = ImageViewAddressPropertiesNVX;
+  };
+
+  struct ImageViewHandleInfoNVX
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}) VULKAN_HPP_NOEXCEPT
+    : imageView( imageView_ ), descriptorType( descriptorType_ ), sampler( sampler_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewHandleInfoNVX ) );
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+
+    operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
+    }
+
+    operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewHandleInfoNVX const& ) const = default;
+#else
+    bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageView == rhs.imageView )
+          && ( descriptorType == rhs.descriptorType )
+          && ( sampler == rhs.sampler );
+    }
+
+    bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+
+  };
+  static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
+  {
+    using Type = ImageViewHandleInfoNVX;
+  };
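+
+  // Usage sketch: with VK_NVX_image_view_handle enabled, an ImageViewHandleInfoNVX describing the
+  // image view, descriptor type, and sampler is passed to the enhanced-mode
+  // Device::getImageViewHandleNVX wrapper, which returns the 32-bit view handle. Assumes the
+  // default `vk` namespace; `device`, `view`, and `sampler` are placeholder names, not part of this header.
+  //
+  //   uint32_t handle = device.getImageViewHandleNVX(
+  //     vk::ImageViewHandleInfoNVX( view, vk::DescriptorType::eCombinedImageSampler, sampler ) );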
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct MemoryGetAndroidHardwareBufferInfoANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
+    : memory( memory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) );
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const& ) const = default;
+#else
+    bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory );
+    }
+
+    bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+
+  };
+  static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
+  {
+    using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct MemoryGetFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : memory( memory_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryGetFdInfoKHR ) );
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
+    }
+
+    operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryGetFdInfoKHR const& ) const = default;
+#else
+    bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
+  {
+    using Type = MemoryGetFdInfoKHR;
+  };
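+
+  // Usage sketch: with VK_KHR_external_memory_fd enabled, a MemoryGetFdInfoKHR names the allocation
+  // and handle type to export, and the enhanced-mode Device::getMemoryFdKHR wrapper returns the
+  // resulting POSIX file descriptor. Assumes the default `vk` namespace; `device` and `memory` are
+  // placeholder names, not part of this header.
+  //
+  //   int fd = device.getMemoryFdKHR(
+  //     vk::MemoryGetFdInfoKHR( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );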
+
+  struct MemoryFdPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryFdPropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
+    }
+
+    operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryFdPropertiesKHR const& ) const = default;
+#else
+    bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
+    void* pNext = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
+  {
+    using Type = MemoryFdPropertiesKHR;
+  };
+
+  struct MemoryHostPointerPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryHostPointerPropertiesEXT const& ) const = default;
+#else
+    bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+    void* pNext = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
+  {
+    using Type = MemoryHostPointerPropertiesEXT;
+  };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryGetWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : memory( memory_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryGetWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
+  {
+    using Type = MemoryGetWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryWin32HandlePropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryWin32HandlePropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+    operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default;
+#else
+    bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+    void* pNext = {};
+    uint32_t memoryTypeBits = {};
+
+  };
+  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
+  {
+    using Type = MemoryWin32HandlePropertiesKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct PastPresentationTimingGOOGLE
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT
+    : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ), actualPresentTime( actualPresentTime_ ), earliestPresentTime( earliestPresentTime_ ), presentMargin( presentMargin_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PastPresentationTimingGOOGLE ) );
+      return *this;
+    }
+
+
+    operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
+    }
+
+    operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PastPresentationTimingGOOGLE const& ) const = default;
+#else
+    bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime )
+          && ( actualPresentTime == rhs.actualPresentTime )
+          && ( earliestPresentTime == rhs.earliestPresentTime )
+          && ( presentMargin == rhs.presentMargin );
+    }
+
+    bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t presentID = {};
+    uint64_t desiredPresentTime = {};
+    uint64_t actualPresentTime = {};
+    uint64_t earliestPresentTime = {};
+    uint64_t presentMargin = {};
+
+  };
+  static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
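+
+  // A minimal usage sketch for the struct above, assuming a vk::Device "device" created with
+  // VK_GOOGLE_display_timing enabled and an existing vk::SwapchainKHR "swapchain" (enhanced mode):
+  //
+  //   std::vector<vk::PastPresentationTimingGOOGLE> timings =
+  //       device.getPastPresentationTimingGOOGLE( swapchain );
+  //   for ( auto const & t : timings )
+  //   {
+  //     // t.actualPresentTime - t.desiredPresentTime gives the presentation slip in nanoseconds.
+  //   }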
+
+  union PerformanceValueDataINTEL
+  {
+    PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+    }
+
+    PerformanceValueDataINTEL( uint32_t value32_ = {} )
+      : value32( value32_ )
+    {}
+
+    PerformanceValueDataINTEL( uint64_t value64_ )
+      : value64( value64_ )
+    {}
+
+    PerformanceValueDataINTEL( float valueFloat_ )
+      : valueFloat( valueFloat_ )
+    {}
+
+    PerformanceValueDataINTEL( const char* valueString_ )
+      : valueString( valueString_ )
+    {}
+
+    PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value32 = value32_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value64 = value64_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueFloat = valueFloat_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueBool = valueBool_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueString = valueString_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+      return *this;
+    }
+
+    operator VkPerformanceValueDataINTEL const&() const
+    {
+      return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
+    }
+
+    operator VkPerformanceValueDataINTEL &()
+    {
+      return *reinterpret_cast<VkPerformanceValueDataINTEL*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VULKAN_HPP_NAMESPACE::Bool32 valueBool;
+    const char* valueString;
+#else
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VkBool32 valueBool;
+    const char* valueString;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct PerformanceValueINTEL
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT
+    : type( type_ ), data( data_ )
+    {}
+
+    PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceValueINTEL ) );
+      return *this;
+    }
+
+    PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+
+    operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
+    }
+
+    operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
+    }
+
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
+    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
+
+  };
+  static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
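+
+  // A minimal usage sketch, assuming a vk::Device "device" with VK_INTEL_performance_query enabled
+  // (enhanced mode):
+  //
+  //   vk::PerformanceValueINTEL value =
+  //       device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );
+  //   if ( value.type == vk::PerformanceValueTypeINTEL::eBool )
+  //   {
+  //     bool supported = ( value.data.valueBool == VK_TRUE );
+  //   }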
+
+  struct PipelineExecutableInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipeline( pipeline_ ), executableIndex( executableIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutableInfoKHR ) );
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      executableIndex = executableIndex_;
+      return *this;
+    }
+
+
+    operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineExecutableInfoKHR const& ) const = default;
+#else
+    bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline )
+          && ( executableIndex == rhs.executableIndex );
+    }
+
+    bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    uint32_t executableIndex = {};
+
+  };
+  static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
+  {
+    using Type = PipelineExecutableInfoKHR;
+  };
+
+  struct PipelineExecutableInternalRepresentationKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+    : name( name_ ), description( description_ ), isText( isText_ ), dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_, VULKAN_HPP_NAMESPACE::Bool32 isText_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
+    : name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutableInternalRepresentationKHR ) );
+      return *this;
+    }
+
+
+    operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineExecutableInternalRepresentationKHR const& ) const = default;
+#else
+    bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( name == rhs.name )
+          && ( description == rhs.description )
+          && ( isText == rhs.isText )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+    size_t dataSize = {};
+    void* pData = {};
+
+  };
+  static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
+  {
+    using Type = PipelineExecutableInternalRepresentationKHR;
+  };
+
+  struct PipelineInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipeline( pipeline_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineInfoKHR ) );
+      return *this;
+    }
+
+    PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+
+    operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
+    }
+
+    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineInfoKHR const& ) const = default;
+#else
+    bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline );
+    }
+
+    bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+
+  };
+  static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
+  {
+    using Type = PipelineInfoKHR;
+  };
+
+  struct PipelineExecutablePropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, uint32_t subgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : stages( stages_ ), name( name_ ), description( description_ ), subgroupSize( subgroupSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutablePropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+    operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineExecutablePropertiesKHR const& ) const = default;
+#else
+    bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stages == rhs.stages )
+          && ( name == rhs.name )
+          && ( description == rhs.description )
+          && ( subgroupSize == rhs.subgroupSize );
+    }
+
+    bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    uint32_t subgroupSize = {};
+
+  };
+  static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
+  {
+    using Type = PipelineExecutablePropertiesKHR;
+  };
+
+  union PipelineExecutableStatisticValueKHR
+  {
+    PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+    }
+
+    PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} )
+      : b32( b32_ )
+    {}
+
+    PipelineExecutableStatisticValueKHR( int64_t i64_ )
+      : i64( i64_ )
+    {}
+
+    PipelineExecutableStatisticValueKHR( uint64_t u64_ )
+      : u64( u64_ )
+    {}
+
+    PipelineExecutableStatisticValueKHR( double f64_ )
+      : f64( f64_ )
+    {}
+
+    PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b32 = b32_;
+      return *this;
+    }
+
+    PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      i64 = i64_;
+      return *this;
+    }
+
+    PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      u64 = u64_;
+      return *this;
+    }
+
+    PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      f64 = f64_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+      return *this;
+    }
+
+    operator VkPipelineExecutableStatisticValueKHR const&() const
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>(this);
+    }
+
+    operator VkPipelineExecutableStatisticValueKHR &()
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::Bool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#else
+    VkBool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct PipelineExecutableStatisticKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    PipelineExecutableStatisticKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}) VULKAN_HPP_NOEXCEPT
+    : name( name_ ), description( description_ ), format( format_ ), value( value_ )
+    {}
+
+    PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineExecutableStatisticKHR ) );
+      return *this;
+    }
+
+
+    operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+    operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+
+  };
+  static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
+  {
+    using Type = PipelineExecutableStatisticKHR;
+  };
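+
+  // A minimal usage sketch tying the structs above together, assuming a vk::Device "device" with
+  // VK_KHR_pipeline_executable_properties enabled and a vk::Pipeline "pipeline" created with
+  // vk::PipelineCreateFlagBits::eCaptureStatisticsKHR (enhanced mode):
+  //
+  //   std::vector<vk::PipelineExecutablePropertiesKHR> executables =
+  //       device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );
+  //   for ( uint32_t i = 0; i < executables.size(); ++i )
+  //   {
+  //     std::vector<vk::PipelineExecutableStatisticKHR> statistics =
+  //         device.getPipelineExecutableStatisticsKHR( vk::PipelineExecutableInfoKHR( pipeline, i ) );
+  //   }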
+
+  struct RefreshCycleDurationGOOGLE
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT
+    : refreshDuration( refreshDuration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
+      return *this;
+    }
+
+
+    operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RefreshCycleDurationGOOGLE const& ) const = default;
+#else
+    bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( refreshDuration == rhs.refreshDuration );
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint64_t refreshDuration = {};
+
+  };
+  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
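+
+  // A minimal usage sketch, assuming the same "device" and "swapchain" as in the
+  // PastPresentationTimingGOOGLE sketch above (enhanced mode):
+  //
+  //   vk::RefreshCycleDurationGOOGLE cycle = device.getRefreshCycleDurationGOOGLE( swapchain );
+  //   uint64_t refreshNs = cycle.refreshDuration;  // display refresh period in nanoseconds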
+
+  struct SemaphoreGetFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : semaphore( semaphore_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SemaphoreGetFdInfoKHR const& ) const = default;
+#else
+    bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
+  {
+    using Type = SemaphoreGetFdInfoKHR;
+  };
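+
+  // A minimal usage sketch, assuming a vk::Device "device" with VK_KHR_external_semaphore_fd enabled
+  // and a vk::Semaphore "semaphore" created as exportable (enhanced mode; ownership of the returned
+  // POSIX file descriptor passes to the caller):
+  //
+  //   int fd = device.getSemaphoreFdKHR(
+  //       vk::SemaphoreGetFdInfoKHR( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) );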
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SemaphoreGetWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : semaphore( semaphore_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SemaphoreGetWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
+  {
+    using Type = SemaphoreGetWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ImportFenceFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
+    : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportFenceFdInfoKHR ) );
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+
+
+    operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
+    }
+
+    operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportFenceFdInfoKHR const& ) const = default;
+#else
+    bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
+
+  };
+  static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
+  {
+    using Type = ImportFenceFdInfoKHR;
+  };
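+
+  // A minimal usage sketch, assuming a vk::Device "device" with VK_KHR_external_fence_fd enabled,
+  // a vk::Fence "fence", and a sync file descriptor "fd" obtained elsewhere (enhanced mode):
+  //
+  //   device.importFenceFdKHR( vk::ImportFenceFdInfoKHR( fence,
+  //                                                      vk::FenceImportFlagBits::eTemporary,
+  //                                                      vk::ExternalFenceHandleTypeFlagBits::eSyncFd,
+  //                                                      fd ) );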
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportFenceWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+    : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+
+    operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportFenceWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
+
+  };
+  static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
+  {
+    using Type = ImportFenceWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ImportSemaphoreFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
+    : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+
+
+    operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
+    }
+
+    operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportSemaphoreFdInfoKHR const& ) const = default;
+#else
+    bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
+
+  };
+  static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
+  {
+    using Type = ImportSemaphoreFdInfoKHR;
+  };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportSemaphoreWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+    : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+
+    operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
+
+  };
+  static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
+  {
+    using Type = ImportSemaphoreWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct InitializePerformanceApiInfoINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
+    : pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+    InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( InitializePerformanceApiInfoINTEL ) );
+      return *this;
+    }
+
+    InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+
+    operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
+    }
+
+    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( InitializePerformanceApiInfoINTEL const& ) const = default;
+#else
+    bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
+    const void* pNext = {};
+    void* pUserData = {};
+
+  };
+  static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
+  {
+    using Type = InitializePerformanceApiInfoINTEL;
+  };
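+
+  // A minimal usage sketch, assuming a vk::Device "device" with VK_INTEL_performance_query enabled
+  // (enhanced mode; pair with device.uninitializePerformanceApiINTEL() when done):
+  //
+  //   device.initializePerformanceApiINTEL( vk::InitializePerformanceApiInfoINTEL( nullptr ) );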
+
+  struct DisplayEventInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut) VULKAN_HPP_NOEXCEPT
+    : displayEvent( displayEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayEventInfoEXT ) );
+      return *this;
+    }
+
+    DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayEvent = displayEvent_;
+      return *this;
+    }
+
+
+    operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
+    }
+
+    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayEventInfoEXT const& ) const = default;
+#else
+    bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayEvent == rhs.displayEvent );
+    }
+
+    bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
+
+  };
+  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
+  {
+    using Type = DisplayEventInfoEXT;
+  };
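+
+  // Usage sketch (illustrative only, assuming the default `vk` namespace, a vk::Device `device`,
+  // a vk::DisplayKHR `display`, and a device with VK_EXT_display_control enabled):
+  //   vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
+  //   vk::Fence fence = device.registerDisplayEventEXT( display, eventInfo );
+  // The returned fence is signalled when the first pixel of the next frame leaves the display.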
+
+  struct XYColorEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
+      return *this;
+    }
+
+    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( XYColorEXT ) );
+      return *this;
+    }
+
+    XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+
+    operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXYColorEXT*>( this );
+    }
+
+    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( XYColorEXT const& ) const = default;
+#else
+    bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    float x = {};
+    float y = {};
+
+  };
+  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct HdrMetadataEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}) VULKAN_HPP_NOEXCEPT
+    : displayPrimaryRed( displayPrimaryRed_ ), displayPrimaryGreen( displayPrimaryGreen_ ), displayPrimaryBlue( displayPrimaryBlue_ ), whitePoint( whitePoint_ ), maxLuminance( maxLuminance_ ), minLuminance( minLuminance_ ), maxContentLightLevel( maxContentLightLevel_ ), maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
+      return *this;
+    }
+
+    HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( HdrMetadataEXT ) );
+      return *this;
+    }
+
+    HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryRed = displayPrimaryRed_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryGreen = displayPrimaryGreen_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryBlue = displayPrimaryBlue_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      whitePoint = whitePoint_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLuminance = maxLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLuminance = minLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxContentLightLevel = maxContentLightLevel_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
+      return *this;
+    }
+
+
+    operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
+    }
+
+    operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHdrMetadataEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( HdrMetadataEXT const& ) const = default;
+#else
+    bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPrimaryRed == rhs.displayPrimaryRed )
+          && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
+          && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
+          && ( whitePoint == rhs.whitePoint )
+          && ( maxLuminance == rhs.maxLuminance )
+          && ( minLuminance == rhs.minLuminance )
+          && ( maxContentLightLevel == rhs.maxContentLightLevel )
+          && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
+    }
+
+    bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
+    float maxLuminance = {};
+    float minLuminance = {};
+    float maxContentLightLevel = {};
+    float maxFrameAverageLightLevel = {};
+
+  };
+  static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eHdrMetadataEXT>
+  {
+    using Type = HdrMetadataEXT;
+  };
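+
+  // Usage sketch (illustrative only, assuming the default `vk` namespace, a vk::Device `device`,
+  // and a vk::SwapchainKHR `swapchain` on a device exposing VK_EXT_hdr_metadata):
+  //   vk::HdrMetadataEXT metadata;
+  //   metadata.setDisplayPrimaryRed( vk::XYColorEXT( 0.680f, 0.320f ) )
+  //           .setDisplayPrimaryGreen( vk::XYColorEXT( 0.265f, 0.690f ) )
+  //           .setDisplayPrimaryBlue( vk::XYColorEXT( 0.150f, 0.060f ) )
+  //           .setWhitePoint( vk::XYColorEXT( 0.3127f, 0.3290f ) )
+  //           .setMaxLuminance( 1000.0f )
+  //           .setMinLuminance( 0.001f );
+  //   device.setHdrMetadataEXT( swapchain, metadata );  // single-element ArrayProxy overload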
+
+  struct SemaphoreSignalInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}) VULKAN_HPP_NOEXCEPT
+    : semaphore( semaphore_ ), value( value_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreSignalInfo ) );
+      return *this;
+    }
+
+    SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value = value_;
+      return *this;
+    }
+
+
+    operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
+    }
+
+    operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SemaphoreSignalInfo const& ) const = default;
+#else
+    bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( value == rhs.value );
+    }
+
+    bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    uint64_t value = {};
+
+  };
+  static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
+  {
+    using Type = SemaphoreSignalInfo;
+  };
+  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
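+
+  // Usage sketch (illustrative only, assuming the default `vk` namespace, a vk::Device `device`,
+  // and a timeline semaphore `timelineSemaphore` created with vk::SemaphoreType::eTimeline):
+  //   vk::SemaphoreSignalInfo signalInfo( timelineSemaphore, 42 );
+  //   device.signalSemaphore( signalInfo );     // core Vulkan 1.2
+  //   device.signalSemaphoreKHR( signalInfo );  // equivalent entry point from VK_KHR_timeline_semaphore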
+
+  struct SemaphoreWaitInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {}, const uint64_t* pValues_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), semaphoreCount( semaphoreCount_ ), pSemaphores( pSemaphores_ ), pValues( pValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {} )
+    : flags( flags_ ), semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) ), pSemaphores( semaphores_.data() ), pValues( values_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
+#else
+      if ( semaphores_.size() != values_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreWaitInfo ) );
+      return *this;
+    }
+
+    SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = semaphoreCount_;
+      return *this;
+    }
+
+    SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSemaphores = pSemaphores_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
+      pSemaphores = semaphores_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pValues = pValues_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = static_cast<uint32_t>( values_.size() );
+      pValues = values_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
+    }
+
+    operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SemaphoreWaitInfo const& ) const = default;
+#else
+    bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( semaphoreCount == rhs.semaphoreCount )
+          && ( pSemaphores == rhs.pSemaphores )
+          && ( pValues == rhs.pValues );
+    }
+
+    bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
+    uint32_t semaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {};
+    const uint64_t* pValues = {};
+
+  };
+  static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
+  {
+    using Type = SemaphoreWaitInfo;
+  };
+  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
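+
+  // Usage sketch (illustrative only, assuming the same `device` and `timelineSemaphore` as above):
+  //   uint64_t waitValue = 42;
+  //   vk::SemaphoreWaitInfo waitInfo( {}, 1, &timelineSemaphore, &waitValue );
+  //   vk::Result result = device.waitSemaphores( waitInfo, UINT64_MAX );  // timeout in nanoseconds; eSuccess or eTimeout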
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Device;
+  template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueAccelerationStructureKHR = UniqueHandle<AccelerationStructureKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Buffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<BufferView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandBuffer, Dispatch> { public: using deleter = PoolFree<Device, CommandPool, Dispatch>; };
+  using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch> class UniqueHandleTraits<DeferredOperationKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDeferredOperationKHR = UniqueHandle<DeferredOperationKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSet, Dispatch> { public: using deleter = PoolFree<Device, DescriptorPool, Dispatch>; };
+  using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DeviceMemory, Dispatch> { public: using deleter = ObjectFree<Device, Dispatch>; };
+  using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Event, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Fence, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Framebuffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Image, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ImageView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<IndirectCommandsLayoutNV, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Pipeline, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<PipelineCache, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<PipelineLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<PrivateDataSlotEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePrivateDataSlotEXT = UniqueHandle<PrivateDataSlotEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<QueryPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<RenderPass, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Sampler, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Semaphore, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ShaderModule, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SwapchainKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ValidationCacheEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
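+
+  // Usage sketch (illustrative only, assuming VULKAN_HPP_NO_SMART_HANDLE is not defined, the default
+  // `vk` namespace, and a vk::Device `device`): the *Unique creation functions return these aliases, e.g.
+  //   vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo() );
+  // which destroys the fence through ObjectDestroy<Device, Dispatch> when `fence` goes out of scope.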
+
+  class Device
+  {
+  public:
+    using CType = VkDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT
+      : m_device( device )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = device;
+      return *this;
+    }
+#endif
+
+    Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Device const& ) const = default;
+#else
+    bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == rhs.m_device;
+    }
+
+    bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != rhs.m_device;
+    }
+
+    bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device < rhs.m_device;
+    }
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CommandBufferAllocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CommandBufferAllocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>, typename B = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
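+
+    // Usage sketch (illustrative only, assuming enhanced mode with exceptions and a vk::CommandPool `commandPool`):
+    //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 3 );
+    //   std::vector<vk::CommandBuffer> buffers = device.allocateCommandBuffers( allocInfo );
+    //   auto uniqueBuffers = device.allocateCommandBuffersUnique( allocInfo );  // freed back to the pool on destruction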
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>, typename B = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const & pOffsetInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
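+    // NOTE (editorial, illustrative sketch only): pipeline creation returns ResultValue rather than
+    // ResultValueType because vkCreateComputePipelines can complete with success codes other than
+    // VK_SUCCESS, so the result must be inspected even when exceptions are enabled. Assuming a
+    // `device`, `pipelineCache` and a populated `computeCreateInfo`:
+    //   vk::ResultValue<vk::UniquePipeline> rv = device.createComputePipelineUnique( pipelineCache, computeCreateInfo );
+    //   if ( rv.result == vk::Result::eSuccess ) { vk::UniquePipeline pipeline = std::move( rv.value ); }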
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
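+    // NOTE (editorial, illustrative sketch only): the enhanced-mode overload accepts any contiguous
+    // range of create infos via ArrayProxy and, with exceptions enabled, yields the created handles
+    // directly. Assuming a `device` and create-info structures filled in by the application:
+    //   std::array<vk::SwapchainCreateInfoKHR, 2> infos{};
+    //   std::vector<vk::SwapchainKHR> swapchains = device.createSharedSwapchainsKHR( infos );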
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
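+    // NOTE (editorial, illustrative sketch only): with exceptions enabled, ResultValueType<void>::type
+    // is void, so the enhanced-mode overload is called purely for its side effect. `device` and
+    // `objectHandle` are assumed to exist in the caller:
+    //   device.debugMarkerSetObjectNameEXT(
+    //     vk::DebugMarkerObjectNameInfoEXT( vk::DebugReportObjectTypeEXT::eImage, objectHandle, "scene color target" ) );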
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
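+    // NOTE (editorial, illustrative sketch only): deferredOperationJoinKHR keeps returning a plain
+    // Result even in enhanced mode because the thread-done / thread-idle success codes have to be
+    // acted on by the caller. Assuming beta extensions are enabled and a `device` exists:
+    //   vk::DeferredOperationKHR op = device.createDeferredOperationKHR();
+    //   vk::Result r = device.deferredOperationJoinKHR( op );   // eSuccess, eThreadDoneKHR or eThreadIdleKHR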
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getAccelerationStructureHandleNV(  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT  ) const;
+    template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
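+
+    // A minimal usage sketch for the enhanced-mode overload above, assuming `device` is a
+    // vk::Device and `buffer` is a vk::Buffer created with the eShaderDeviceAddress usage flag
+    // (the EXT/KHR aliases take the same info structure):
+    //
+    //   vk::DeviceAddress address = device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );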
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
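+
+    // A minimal sketch of the StructureChain overload above, assuming `device` is a vk::Device
+    // and `buffer` is a valid vk::Buffer; chaining vk::MemoryDedicatedRequirements also reports
+    // whether a dedicated allocation is required or preferred:
+    //
+    //   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2,
+    //                                                    vk::MemoryDedicatedRequirements>(
+    //     vk::BufferMemoryRequirementsInfo2( buffer ) );
+    //   vk::MemoryRequirements reqs      = chain.get<vk::MemoryRequirements2>().memoryRequirements;
+    //   vk::Bool32             dedicated = chain.get<vk::MemoryDedicatedRequirements>().requiresDedicatedAllocation;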
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const &timestampInfos, ArrayProxy<uint64_t> const &timestamps, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
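+
+    // A minimal sketch of the enhanced-mode overloads above, assuming `device` is a vk::Device
+    // and `queueFamilyIndex` names a family that was requested at device creation:
+    //
+    //   vk::Queue queue  = device.getQueue( queueFamilyIndex, 0 );
+    //   vk::Queue queue2 = device.getQueue2( vk::DeviceQueueInfo2( {}, queueFamilyIndex, 0 ) );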
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageMemoryRequirementsAllocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
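+
+    // A minimal sketch of the enhanced-mode overload above, assuming `device` is a vk::Device,
+    // `cache` is a vk::PipelineCache, and exceptions are enabled (the ResultValueType then
+    // unwraps to the vector itself); the bytes can be persisted and passed back when the cache
+    // is recreated:
+    //
+    //   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );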
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getQueryPoolResults(  VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT  ) const;
+    template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<T,Allocator>> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<T> getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
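+
+    // A minimal sketch of the vector-returning overload above, assuming `device` is a vk::Device
+    // and `queryPool` holds `queryCount` 64-bit timestamp queries that have already been written:
+    //
+    //   auto result = device.getQueryPoolResults<uint64_t>(
+    //     queryPool, 0, queryCount, queryCount * sizeof( uint64_t ), sizeof( uint64_t ),
+    //     vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
+    //   std::vector<uint64_t> timestamps = result.value;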
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingCaptureReplayShaderGroupHandlesKHR(  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT  ) const;
+    template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingShaderGroupHandlesKHR(  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT  ) const;
+    template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingShaderGroupHandlesNV(  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT  ) const;
+    template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void releaseProfilingLockKHR( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void uninitializePerformanceApiINTEL( Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const &data, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDevice m_device;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDevice>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Device>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DisplayModeParametersKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT
+    : visibleRegion( visibleRegion_ ), refreshRate( refreshRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModeParametersKHR ) );
+      return *this;
+    }
+
+    DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      visibleRegion = visibleRegion_;
+      return *this;
+    }
+
+    DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refreshRate = refreshRate_;
+      return *this;
+    }
+
+
+    operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
+    }
+
+    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeParametersKHR const& ) const = default;
+#else
+    bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( visibleRegion == rhs.visibleRegion )
+          && ( refreshRate == rhs.refreshRate );
+    }
+
+    bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
+    uint32_t refreshRate = {};
+
+  };
+  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), parameters( parameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModeCreateInfoKHR ) );
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameters = parameters_;
+      return *this;
+    }
+
+
+    operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+    operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+
+  };
+  static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
+  {
+    using Type = DisplayModeCreateInfoKHR;
+  };
+
+  class DisplayModeKHR
+  {
+  public:
+    using CType = VkDisplayModeKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR( displayModeKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = displayModeKHR;
+      return *this;
+    }
+#endif
+
+    DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeKHR const& ) const = default;
+#else
+    bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == rhs.m_displayModeKHR;
+    }
+
+    bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != rhs.m_displayModeKHR;
+    }
+
+    bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR < rhs.m_displayModeKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayModeKHR m_displayModeKHR;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayModeKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct ExtensionProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT
+    : extensionName( extensionName_ ), specVersion( specVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
+      return *this;
+    }
+
+    ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExtensionProperties ) );
+      return *this;
+    }
+
+
+    operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtensionProperties*>( this );
+    }
+
+    operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtensionProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExtensionProperties const& ) const = default;
+#else
+    bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( extensionName == rhs.extensionName )
+          && ( specVersion == rhs.specVersion );
+    }
+
+    bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
+    uint32_t specVersion = {};
+
+  };
+  static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct LayerProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}) VULKAN_HPP_NOEXCEPT
+    : layerName( layerName_ ), specVersion( specVersion_ ), implementationVersion( implementationVersion_ ), description( description_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
+      return *this;
+    }
+
+    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( LayerProperties ) );
+      return *this;
+    }
+
+
+    operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkLayerProperties*>( this );
+    }
+
+    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkLayerProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( LayerProperties const& ) const = default;
+#else
+    bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( layerName == rhs.layerName )
+          && ( specVersion == rhs.specVersion )
+          && ( implementationVersion == rhs.implementationVersion )
+          && ( description == rhs.description );
+    }
+
+    bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
+    uint32_t specVersion = {};
+    uint32_t implementationVersion = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+
+  };
+  static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceCounterKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array<uint8_t,VK_UUID_SIZE> const& uuid_ = {}) VULKAN_HPP_NOEXCEPT
+    : unit( unit_ ), scope( scope_ ), storage( storage_ ), uuid( uuid_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceCounterKHR ) );
+      return *this;
+    }
+
+
+    operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
+    }
+
+    operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceCounterKHR const& ) const = default;
+#else
+    bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( unit == rhs.unit )
+          && ( scope == rhs.scope )
+          && ( storage == rhs.storage )
+          && ( uuid == rhs.uuid );
+    }
+
+    bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
+
+  };
+  static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
+  {
+    using Type = PerformanceCounterKHR;
+  };
+
+  struct PerformanceCounterDescriptionKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& category_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), name( name_ ), category( category_ ), description( description_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceCounterDescriptionKHR ) );
+      return *this;
+    }
+
+
+    operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
+    }
+
+    operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceCounterDescriptionKHR const& ) const = default;
+#else
+    bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( name == rhs.name )
+          && ( category == rhs.category )
+          && ( description == rhs.description );
+    }
+
+    bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+
+  };
+  static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
+  {
+    using Type = PerformanceCounterDescriptionKHR;
+  };
+
+  struct DisplayModePropertiesKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
+    : displayMode( displayMode_ ), parameters( parameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModePropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModePropertiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( displayMode == rhs.displayMode )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+
+  };
+  static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeProperties2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : displayModeProperties( displayModeProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayModeProperties2KHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
+    }
+
+    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeProperties2KHR const& ) const = default;
+#else
+    bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayModeProperties == rhs.displayModeProperties );
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+
+  };
+  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
+  {
+    using Type = DisplayModeProperties2KHR;
+  };
+
+  struct DisplayPlaneInfo2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : mode( mode_ ), planeIndex( planeIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneInfo2KHR ) );
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+
+    operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneInfo2KHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mode == rhs.mode )
+          && ( planeIndex == rhs.planeIndex );
+    }
+
+    bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
+    uint32_t planeIndex = {};
+
+  };
+  static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
+  {
+    using Type = DisplayPlaneInfo2KHR;
+  };
+
+  struct DisplayPlaneCapabilitiesKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT
+    : supportedAlpha( supportedAlpha_ ), minSrcPosition( minSrcPosition_ ), maxSrcPosition( maxSrcPosition_ ), minSrcExtent( minSrcExtent_ ), maxSrcExtent( maxSrcExtent_ ), minDstPosition( minDstPosition_ ), maxDstPosition( maxDstPosition_ ), minDstExtent( minDstExtent_ ), maxDstExtent( maxDstExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneCapabilitiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( supportedAlpha == rhs.supportedAlpha )
+          && ( minSrcPosition == rhs.minSrcPosition )
+          && ( maxSrcPosition == rhs.maxSrcPosition )
+          && ( minSrcExtent == rhs.minSrcExtent )
+          && ( maxSrcExtent == rhs.maxSrcExtent )
+          && ( minDstPosition == rhs.minDstPosition )
+          && ( maxDstPosition == rhs.maxDstPosition )
+          && ( minDstExtent == rhs.minDstExtent )
+          && ( maxDstExtent == rhs.maxDstExtent );
+    }
+
+    bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
+    VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
+    VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
+
+  };
+  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneCapabilities2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}) VULKAN_HPP_NOEXCEPT
+    : capabilities( capabilities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneCapabilities2KHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneCapabilities2KHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( capabilities == rhs.capabilities );
+    }
+
+    bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
+
+  };
+  static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
+  {
+    using Type = DisplayPlaneCapabilities2KHR;
+  };
+
+  struct DisplayPlanePropertiesKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : currentDisplay( currentDisplay_ ), currentStackIndex( currentStackIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlanePropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlanePropertiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( currentDisplay == rhs.currentDisplay )
+          && ( currentStackIndex == rhs.currentStackIndex );
+    }
+
+    bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
+    uint32_t currentStackIndex = {};
+
+  };
+  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneProperties2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : displayPlaneProperties( displayPlaneProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPlaneProperties2KHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneProperties2KHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPlaneProperties == rhs.displayPlaneProperties );
+    }
+
+    bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
+
+  };
+  static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
+  {
+    using Type = DisplayPlaneProperties2KHR;
+  };
+
+  struct DisplayPropertiesKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char* displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT
+    : display( display_ ), displayName( displayName_ ), physicalDimensions( physicalDimensions_ ), physicalResolution( physicalResolution_ ), supportedTransforms( supportedTransforms_ ), planeReorderPossible( planeReorderPossible_ ), persistentContent( persistentContent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPropertiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( display == rhs.display )
+          && ( displayName == rhs.displayName )
+          && ( physicalDimensions == rhs.physicalDimensions )
+          && ( physicalResolution == rhs.physicalResolution )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( planeReorderPossible == rhs.planeReorderPossible )
+          && ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
+    const char* displayName = {};
+    VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
+    VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
+    VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
+
+  };
+  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayProperties2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : displayProperties( displayProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayProperties2KHR ) );
+      return *this;
+    }
+
+
+    operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
+    }
+
+    operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayProperties2KHR const& ) const = default;
+#else
+    bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayProperties == rhs.displayProperties );
+    }
+
+    bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
+
+  };
+  static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
+  {
+    using Type = DisplayProperties2KHR;
+  };
+
+  struct PhysicalDeviceExternalBufferInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), usage( usage_ ), handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
+  {
+    using Type = PhysicalDeviceExternalBufferInfo;
+  };
+  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+
+  struct ExternalMemoryProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryProperties ) );
+      return *this;
+    }
+
+
+    operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
+    }
+
+    operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryProperties const& ) const = default;
+#else
+    bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
+
+  };
+  static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+
+  struct ExternalBufferProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
+      return *this;
+    }
+
+    ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalBufferProperties ) );
+      return *this;
+    }
+
+
+    operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalBufferProperties*>( this );
+    }
+
+    operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalBufferProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalBufferProperties const& ) const = default;
+#else
+    bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+
+  };
+  static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalBufferProperties>
+  {
+    using Type = ExternalBufferProperties;
+  };
+  using ExternalBufferPropertiesKHR = ExternalBufferProperties;
+
+  struct PhysicalDeviceExternalFenceInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
+  {
+    using Type = PhysicalDeviceExternalFenceInfo;
+  };
+  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
+
+  struct ExternalFenceProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalFenceFeatures( externalFenceFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
+      return *this;
+    }
+
+    ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalFenceProperties ) );
+      return *this;
+    }
+
+
+    operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFenceProperties*>( this );
+    }
+
+    operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFenceProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalFenceProperties const& ) const = default;
+#else
+    bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalFenceFeatures == rhs.externalFenceFeatures );
+    }
+
+    bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
+
+  };
+  static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFenceProperties>
+  {
+    using Type = ExternalFenceProperties;
+  };
+  using ExternalFencePropertiesKHR = ExternalFenceProperties;
+
+  struct ImageFormatProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxExtent( maxExtent_ ), maxMipLevels( maxMipLevels_ ), maxArrayLayers( maxArrayLayers_ ), sampleCounts( sampleCounts_ ), maxResourceSize( maxResourceSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageFormatProperties ) );
+      return *this;
+    }
+
+
+    operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties*>( this );
+    }
+
+    operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageFormatProperties const& ) const = default;
+#else
+    bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( maxExtent == rhs.maxExtent )
+          && ( maxMipLevels == rhs.maxMipLevels )
+          && ( maxArrayLayers == rhs.maxArrayLayers )
+          && ( sampleCounts == rhs.sampleCounts )
+          && ( maxResourceSize == rhs.maxResourceSize );
+    }
+
+    bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
+    uint32_t maxMipLevels = {};
+    uint32_t maxArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
+
+  };
+  static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalImageFormatPropertiesNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : imageFormatProperties( imageFormatProperties_ ), externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalImageFormatPropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+    operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalImageFormatPropertiesNV const& ) const = default;
+#else
+    bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( imageFormatProperties == rhs.imageFormatProperties )
+          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
+
+  };
+  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalSemaphoreInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
+  {
+    using Type = PhysicalDeviceExternalSemaphoreInfo;
+  };
+  using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+
+  struct ExternalSemaphoreProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalSemaphoreFeatures( externalSemaphoreFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
+      return *this;
+    }
+
+    ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalSemaphoreProperties ) );
+      return *this;
+    }
+
+
+    operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
+    }
+
+    operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalSemaphoreProperties const& ) const = default;
+#else
+    bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+    }
+
+    bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
+
+  };
+  static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
+  {
+    using Type = ExternalSemaphoreProperties;
+  };
+  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
+
+  struct PhysicalDeviceFeatures2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}) VULKAN_HPP_NOEXCEPT
+    : features( features_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFeatures2 ) );
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
+    {
+      features = features_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( features == rhs.features );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
+  {
+    using Type = PhysicalDeviceFeatures2;
+  };
+  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
+
+  struct FormatProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FormatProperties ) );
+      return *this;
+    }
+
+
+    operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties*>( this );
+    }
+
+    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FormatProperties const& ) const = default;
+#else
+    bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( linearTilingFeatures == rhs.linearTilingFeatures )
+          && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+          && ( bufferFeatures == rhs.bufferFeatures );
+    }
+
+    bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
+
+  };
+  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct FormatProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : formatProperties( formatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FormatProperties2 ) );
+      return *this;
+    }
+
+
+    operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties2*>( this );
+    }
+
+    operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FormatProperties2 const& ) const = default;
+#else
+    bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatProperties == rhs.formatProperties );
+    }
+
+    bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
+
+  };
+  static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFormatProperties2>
+  {
+    using Type = FormatProperties2;
+  };
+  using FormatProperties2KHR = FormatProperties2;
+
+  struct PhysicalDeviceFragmentShadingRateKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : sampleCounts( sampleCounts_ ), fragmentSize( fragmentSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentShadingRateKHR ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentShadingRateKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShadingRateKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleCounts == rhs.sampleCounts )
+          && ( fragmentSize == rhs.fragmentSize );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+    VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
+  {
+    using Type = PhysicalDeviceFragmentShadingRateKHR;
+  };
+
+  struct PhysicalDeviceImageFormatInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : format( format_ ), type( type_ ), tiling( tiling_ ), usage( usage_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+    operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
+  {
+    using Type = PhysicalDeviceImageFormatInfo2;
+  };
+  using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+
+  struct ImageFormatProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : imageFormatProperties( imageFormatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageFormatProperties2 ) );
+      return *this;
+    }
+
+
+    operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties2*>( this );
+    }
+
+    operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageFormatProperties2 const& ) const = default;
+#else
+    bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFormatProperties == rhs.imageFormatProperties );
+    }
+
+    bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+
+  };
+  static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageFormatProperties2>
+  {
+    using Type = ImageFormatProperties2;
+  };
+  using ImageFormatProperties2KHR = ImageFormatProperties2;
+
+  struct MemoryType
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryType(VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : propertyFlags( propertyFlags_ ), heapIndex( heapIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
+      return *this;
+    }
+
+    MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryType ) );
+      return *this;
+    }
+
+
+    operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryType*>( this );
+    }
+
+    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryType*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryType const& ) const = default;
+#else
+    bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( propertyFlags == rhs.propertyFlags )
+          && ( heapIndex == rhs.heapIndex );
+    }
+
+    bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
+    uint32_t heapIndex = {};
+
+  };
+  static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryHeap
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryHeap(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : size( size_ ), flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
+      return *this;
+    }
+
+    MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryHeap ) );
+      return *this;
+    }
+
+
+    operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHeap*>( this );
+    }
+
+    operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHeap*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryHeap const& ) const = default;
+#else
+    bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( size == rhs.size )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
+
+  };
+  static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMemoryProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryTypeCount( memoryTypeCount_ ), memoryTypes( memoryTypes_ ), memoryHeapCount( memoryHeapCount_ ), memoryHeaps( memoryHeaps_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( memoryTypeCount == rhs.memoryTypeCount )
+          && ( memoryTypes == rhs.memoryTypes )
+          && ( memoryHeapCount == rhs.memoryHeapCount )
+          && ( memoryHeaps == rhs.memoryHeaps );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t memoryTypeCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
+    uint32_t memoryHeapCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMemoryProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryProperties( memoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryProperties2 ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryProperties == rhs.memoryProperties );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
+  {
+    using Type = PhysicalDeviceMemoryProperties2;
+  };
+  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
+
+  struct MultisamplePropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MultisamplePropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
+    }
+
+    operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MultisamplePropertiesEXT const& ) const = default;
+#else
+    bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+    }
+
+    bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+
+  };
+  static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
+  {
+    using Type = MultisamplePropertiesEXT;
+  };
 
   struct PhysicalDeviceLimits
   {
-    operator VkPhysicalDeviceLimits const&() const
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array<uint32_t,2> const& maxViewportDimensions_ = {}, std::array<float,2> const& viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array<float,2> const& pointSizeRange_ = {}, std::array<float,2> const& lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxImageDimension1D( maxImageDimension1D_ ), maxImageDimension2D( maxImageDimension2D_ ), maxImageDimension3D( maxImageDimension3D_ ), maxImageDimensionCube( maxImageDimensionCube_ ), maxImageArrayLayers( maxImageArrayLayers_ ), maxTexelBufferElements( maxTexelBufferElements_ ), maxUniformBufferRange( maxUniformBufferRange_ ), maxStorageBufferRange( maxStorageBufferRange_ ), maxPushConstantsSize( maxPushConstantsSize_ ), maxMemoryAllocationCount( maxMemoryAllocationCount_ ), maxSamplerAllocationCount( maxSamplerAllocationCount_ ), bufferImageGranularity( bufferImageGranularity_ ), sparseAddressSpaceSize( sparseAddressSpaceSize_ ), maxBoundDescriptorSets( maxBoundDescriptorSets_ ), maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ), maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ), maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ), maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ), maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ), maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ), maxPerStageResources( maxPerStageResources_ ), maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ), maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ), maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ), maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ), maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ), maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ), maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ), maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ), maxVertexInputAttributes( maxVertexInputAttributes_ ), maxVertexInputBindings( maxVertexInputBindings_ ), maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ), maxVertexInputBindingStride( maxVertexInputBindingStride_ ), maxVertexOutputComponents( maxVertexOutputComponents_ ), maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ), maxTessellationPatchSize( maxTessellationPatchSize_ ), maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ), maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ), maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ), maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ), maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ), maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ), maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ), maxGeometryInputComponents( maxGeometryInputComponents_ ), maxGeometryOutputComponents( maxGeometryOutputComponents_ ), maxGeometryOutputVertices( maxGeometryOutputVertices_ ), maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ), maxFragmentInputComponents( maxFragmentInputComponents_ ), maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ), maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ), maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ), maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ), maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ), maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ), maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ), subPixelPrecisionBits( subPixelPrecisionBits_ ), subTexelPrecisionBits( subTexelPrecisionBits_ ), mipmapPrecisionBits( mipmapPrecisionBits_ ), maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ), maxDrawIndirectCount( maxDrawIndirectCount_ ), maxSamplerLodBias( maxSamplerLodBias_ ), maxSamplerAnisotropy( maxSamplerAnisotropy_ ), maxViewports( maxViewports_ ), maxViewportDimensions( maxViewportDimensions_ ), viewportBoundsRange( viewportBoundsRange_ ), viewportSubPixelBits( viewportSubPixelBits_ ), minMemoryMapAlignment( minMemoryMapAlignment_ ), minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ), minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ), minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ), minTexelOffset( minTexelOffset_ ), maxTexelOffset( maxTexelOffset_ ), minTexelGatherOffset( minTexelGatherOffset_ ), maxTexelGatherOffset( maxTexelGatherOffset_ ), minInterpolationOffset( minInterpolationOffset_ ), maxInterpolationOffset( maxInterpolationOffset_ ), subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ), maxFramebufferWidth( maxFramebufferWidth_ ), maxFramebufferHeight( maxFramebufferHeight_ ), maxFramebufferLayers( maxFramebufferLayers_ ), framebufferColorSampleCounts( framebufferColorSampleCounts_ ), framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ), framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ), framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ), maxColorAttachments( maxColorAttachments_ ), sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ), sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ), sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ), sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ), storageImageSampleCounts( storageImageSampleCounts_ ), maxSampleMaskWords( maxSampleMaskWords_ ), timestampComputeAndGraphics( timestampComputeAndGraphics_ ), timestampPeriod( timestampPeriod_ ), maxClipDistances( maxClipDistances_ ), maxCullDistances( maxCullDistances_ ), maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ), discreteQueuePriorities( discreteQueuePriorities_ ), pointSizeRange( pointSizeRange_ ), lineWidthRange( lineWidthRange_ ), pointSizeGranularity( pointSizeGranularity_ ), lineWidthGranularity( lineWidthGranularity_ ), strictLines( strictLines_ ), standardSampleLocations( standardSampleLocations_ ), optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ), optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ), nonCoherentAtomSize( nonCoherentAtomSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceLimits*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
+      return *this;
     }
 
-    operator VkPhysicalDeviceLimits &()
+    PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceLimits*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceLimits ) );
+      return *this;
     }
 
-    bool operator==( PhysicalDeviceLimits const& rhs ) const
+
+    operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
+    }
+
+    operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceLimits const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( maxImageDimension1D == rhs.maxImageDimension1D )
           && ( maxImageDimension2D == rhs.maxImageDimension2D )
@@ -27596,9 +52713,9 @@
           && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
           && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
           && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
-          && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount )
           && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
-          && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize )
           && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
           && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
           && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
@@ -27607,8 +52724,8 @@
           && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
           && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
           && ( maxViewports == rhs.maxViewports )
-          && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
-          && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
+          && ( maxViewportDimensions == rhs.maxViewportDimensions )
+          && ( viewportBoundsRange == rhs.viewportBoundsRange )
           && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
           && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
           && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
@@ -27641,8 +52758,8 @@
           && ( maxCullDistances == rhs.maxCullDistances )
           && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
           && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
-          && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
-          && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
+          && ( pointSizeRange == rhs.pointSizeRange )
+          && ( lineWidthRange == rhs.lineWidthRange )
           && ( pointSizeGranularity == rhs.pointSizeGranularity )
           && ( lineWidthGranularity == rhs.lineWidthGranularity )
           && ( strictLines == rhs.strictLines )
@@ -27652,304 +52769,700 @@
           && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
     }
 
-    bool operator!=( PhysicalDeviceLimits const& rhs ) const
+    bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    uint32_t maxImageDimension1D;
-    uint32_t maxImageDimension2D;
-    uint32_t maxImageDimension3D;
-    uint32_t maxImageDimensionCube;
-    uint32_t maxImageArrayLayers;
-    uint32_t maxTexelBufferElements;
-    uint32_t maxUniformBufferRange;
-    uint32_t maxStorageBufferRange;
-    uint32_t maxPushConstantsSize;
-    uint32_t maxMemoryAllocationCount;
-    uint32_t maxSamplerAllocationCount;
-    DeviceSize bufferImageGranularity;
-    DeviceSize sparseAddressSpaceSize;
-    uint32_t maxBoundDescriptorSets;
-    uint32_t maxPerStageDescriptorSamplers;
-    uint32_t maxPerStageDescriptorUniformBuffers;
-    uint32_t maxPerStageDescriptorStorageBuffers;
-    uint32_t maxPerStageDescriptorSampledImages;
-    uint32_t maxPerStageDescriptorStorageImages;
-    uint32_t maxPerStageDescriptorInputAttachments;
-    uint32_t maxPerStageResources;
-    uint32_t maxDescriptorSetSamplers;
-    uint32_t maxDescriptorSetUniformBuffers;
-    uint32_t maxDescriptorSetUniformBuffersDynamic;
-    uint32_t maxDescriptorSetStorageBuffers;
-    uint32_t maxDescriptorSetStorageBuffersDynamic;
-    uint32_t maxDescriptorSetSampledImages;
-    uint32_t maxDescriptorSetStorageImages;
-    uint32_t maxDescriptorSetInputAttachments;
-    uint32_t maxVertexInputAttributes;
-    uint32_t maxVertexInputBindings;
-    uint32_t maxVertexInputAttributeOffset;
-    uint32_t maxVertexInputBindingStride;
-    uint32_t maxVertexOutputComponents;
-    uint32_t maxTessellationGenerationLevel;
-    uint32_t maxTessellationPatchSize;
-    uint32_t maxTessellationControlPerVertexInputComponents;
-    uint32_t maxTessellationControlPerVertexOutputComponents;
-    uint32_t maxTessellationControlPerPatchOutputComponents;
-    uint32_t maxTessellationControlTotalOutputComponents;
-    uint32_t maxTessellationEvaluationInputComponents;
-    uint32_t maxTessellationEvaluationOutputComponents;
-    uint32_t maxGeometryShaderInvocations;
-    uint32_t maxGeometryInputComponents;
-    uint32_t maxGeometryOutputComponents;
-    uint32_t maxGeometryOutputVertices;
-    uint32_t maxGeometryTotalOutputComponents;
-    uint32_t maxFragmentInputComponents;
-    uint32_t maxFragmentOutputAttachments;
-    uint32_t maxFragmentDualSrcAttachments;
-    uint32_t maxFragmentCombinedOutputResources;
-    uint32_t maxComputeSharedMemorySize;
-    uint32_t maxComputeWorkGroupCount[3];
-    uint32_t maxComputeWorkGroupInvocations;
-    uint32_t maxComputeWorkGroupSize[3];
-    uint32_t subPixelPrecisionBits;
-    uint32_t subTexelPrecisionBits;
-    uint32_t mipmapPrecisionBits;
-    uint32_t maxDrawIndexedIndexValue;
-    uint32_t maxDrawIndirectCount;
-    float maxSamplerLodBias;
-    float maxSamplerAnisotropy;
-    uint32_t maxViewports;
-    uint32_t maxViewportDimensions[2];
-    float viewportBoundsRange[2];
-    uint32_t viewportSubPixelBits;
-    size_t minMemoryMapAlignment;
-    DeviceSize minTexelBufferOffsetAlignment;
-    DeviceSize minUniformBufferOffsetAlignment;
-    DeviceSize minStorageBufferOffsetAlignment;
-    int32_t minTexelOffset;
-    uint32_t maxTexelOffset;
-    int32_t minTexelGatherOffset;
-    uint32_t maxTexelGatherOffset;
-    float minInterpolationOffset;
-    float maxInterpolationOffset;
-    uint32_t subPixelInterpolationOffsetBits;
-    uint32_t maxFramebufferWidth;
-    uint32_t maxFramebufferHeight;
-    uint32_t maxFramebufferLayers;
-    SampleCountFlags framebufferColorSampleCounts;
-    SampleCountFlags framebufferDepthSampleCounts;
-    SampleCountFlags framebufferStencilSampleCounts;
-    SampleCountFlags framebufferNoAttachmentsSampleCounts;
-    uint32_t maxColorAttachments;
-    SampleCountFlags sampledImageColorSampleCounts;
-    SampleCountFlags sampledImageIntegerSampleCounts;
-    SampleCountFlags sampledImageDepthSampleCounts;
-    SampleCountFlags sampledImageStencilSampleCounts;
-    SampleCountFlags storageImageSampleCounts;
-    uint32_t maxSampleMaskWords;
-    Bool32 timestampComputeAndGraphics;
-    float timestampPeriod;
-    uint32_t maxClipDistances;
-    uint32_t maxCullDistances;
-    uint32_t maxCombinedClipAndCullDistances;
-    uint32_t discreteQueuePriorities;
-    float pointSizeRange[2];
-    float lineWidthRange[2];
-    float pointSizeGranularity;
-    float lineWidthGranularity;
-    Bool32 strictLines;
-    Bool32 standardSampleLocations;
-    DeviceSize optimalBufferCopyOffsetAlignment;
-    DeviceSize optimalBufferCopyRowPitchAlignment;
-    DeviceSize nonCoherentAtomSize;
+
+
+  public:
+    uint32_t maxImageDimension1D = {};
+    uint32_t maxImageDimension2D = {};
+    uint32_t maxImageDimension3D = {};
+    uint32_t maxImageDimensionCube = {};
+    uint32_t maxImageArrayLayers = {};
+    uint32_t maxTexelBufferElements = {};
+    uint32_t maxUniformBufferRange = {};
+    uint32_t maxStorageBufferRange = {};
+    uint32_t maxPushConstantsSize = {};
+    uint32_t maxMemoryAllocationCount = {};
+    uint32_t maxSamplerAllocationCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
+    uint32_t maxBoundDescriptorSets = {};
+    uint32_t maxPerStageDescriptorSamplers = {};
+    uint32_t maxPerStageDescriptorUniformBuffers = {};
+    uint32_t maxPerStageDescriptorStorageBuffers = {};
+    uint32_t maxPerStageDescriptorSampledImages = {};
+    uint32_t maxPerStageDescriptorStorageImages = {};
+    uint32_t maxPerStageDescriptorInputAttachments = {};
+    uint32_t maxPerStageResources = {};
+    uint32_t maxDescriptorSetSamplers = {};
+    uint32_t maxDescriptorSetUniformBuffers = {};
+    uint32_t maxDescriptorSetUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetStorageBuffers = {};
+    uint32_t maxDescriptorSetStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetSampledImages = {};
+    uint32_t maxDescriptorSetStorageImages = {};
+    uint32_t maxDescriptorSetInputAttachments = {};
+    uint32_t maxVertexInputAttributes = {};
+    uint32_t maxVertexInputBindings = {};
+    uint32_t maxVertexInputAttributeOffset = {};
+    uint32_t maxVertexInputBindingStride = {};
+    uint32_t maxVertexOutputComponents = {};
+    uint32_t maxTessellationGenerationLevel = {};
+    uint32_t maxTessellationPatchSize = {};
+    uint32_t maxTessellationControlPerVertexInputComponents = {};
+    uint32_t maxTessellationControlPerVertexOutputComponents = {};
+    uint32_t maxTessellationControlPerPatchOutputComponents = {};
+    uint32_t maxTessellationControlTotalOutputComponents = {};
+    uint32_t maxTessellationEvaluationInputComponents = {};
+    uint32_t maxTessellationEvaluationOutputComponents = {};
+    uint32_t maxGeometryShaderInvocations = {};
+    uint32_t maxGeometryInputComponents = {};
+    uint32_t maxGeometryOutputComponents = {};
+    uint32_t maxGeometryOutputVertices = {};
+    uint32_t maxGeometryTotalOutputComponents = {};
+    uint32_t maxFragmentInputComponents = {};
+    uint32_t maxFragmentOutputAttachments = {};
+    uint32_t maxFragmentDualSrcAttachments = {};
+    uint32_t maxFragmentCombinedOutputResources = {};
+    uint32_t maxComputeSharedMemorySize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
+    uint32_t maxComputeWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
+    uint32_t subPixelPrecisionBits = {};
+    uint32_t subTexelPrecisionBits = {};
+    uint32_t mipmapPrecisionBits = {};
+    uint32_t maxDrawIndexedIndexValue = {};
+    uint32_t maxDrawIndirectCount = {};
+    float maxSamplerLodBias = {};
+    float maxSamplerAnisotropy = {};
+    uint32_t maxViewports = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
+    uint32_t viewportSubPixelBits = {};
+    size_t minMemoryMapAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
+    int32_t minTexelOffset = {};
+    uint32_t maxTexelOffset = {};
+    int32_t minTexelGatherOffset = {};
+    uint32_t maxTexelGatherOffset = {};
+    float minInterpolationOffset = {};
+    float maxInterpolationOffset = {};
+    uint32_t subPixelInterpolationOffsetBits = {};
+    uint32_t maxFramebufferWidth = {};
+    uint32_t maxFramebufferHeight = {};
+    uint32_t maxFramebufferLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
+    uint32_t maxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
+    uint32_t maxSampleMaskWords = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
+    float timestampPeriod = {};
+    uint32_t maxClipDistances = {};
+    uint32_t maxCullDistances = {};
+    uint32_t maxCombinedClipAndCullDistances = {};
+    uint32_t discreteQueuePriorities = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
+    float pointSizeGranularity = {};
+    float lineWidthGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
+
   };
   static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSparseProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT
+    : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ), residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ), residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ), residencyAlignedMipSize( residencyAlignedMipSize_ ), residencyNonResidentStrict( residencyNonResidentStrict_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSparseProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
 
   struct PhysicalDeviceProperties
   {
-    operator VkPhysicalDeviceProperties const&() const
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : apiVersion( apiVersion_ ), driverVersion( driverVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), deviceType( deviceType_ ), deviceName( deviceName_ ), pipelineCacheUUID( pipelineCacheUUID_ ), limits( limits_ ), sparseProperties( sparseProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceProperties*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
+      return *this;
     }
 
-    operator VkPhysicalDeviceProperties &()
+    PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceProperties*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProperties ) );
+      return *this;
     }
 
-    bool operator==( PhysicalDeviceProperties const& rhs ) const
+
+    operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( apiVersion == rhs.apiVersion )
           && ( driverVersion == rhs.driverVersion )
           && ( vendorID == rhs.vendorID )
           && ( deviceID == rhs.deviceID )
           && ( deviceType == rhs.deviceType )
-          && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
-          && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( deviceName == rhs.deviceName )
+          && ( pipelineCacheUUID == rhs.pipelineCacheUUID )
           && ( limits == rhs.limits )
           && ( sparseProperties == rhs.sparseProperties );
     }
 
-    bool operator!=( PhysicalDeviceProperties const& rhs ) const
+    bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    uint32_t apiVersion;
-    uint32_t driverVersion;
-    uint32_t vendorID;
-    uint32_t deviceID;
-    PhysicalDeviceType deviceType;
-    char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
-    uint8_t pipelineCacheUUID[VK_UUID_SIZE];
-    PhysicalDeviceLimits limits;
-    PhysicalDeviceSparseProperties sparseProperties;
+
+
+  public:
+    uint32_t apiVersion = {};
+    uint32_t driverVersion = {};
+    uint32_t vendorID = {};
+    uint32_t deviceID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
+
   };
   static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
 
   struct PhysicalDeviceProperties2
   {
-    operator VkPhysicalDeviceProperties2 const&() const
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
+    : properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
+      return *this;
     }
 
-    operator VkPhysicalDeviceProperties2 &()
+    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceProperties2*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProperties2 ) );
+      return *this;
     }
 
-    bool operator==( PhysicalDeviceProperties2 const& rhs ) const
+
+    operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProperties2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( properties == rhs.properties );
     }
 
-    bool operator!=( PhysicalDeviceProperties2 const& rhs ) const
+    bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceProperties2;
+
 
   public:
-    void* pNext = nullptr;
-    PhysicalDeviceProperties properties;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+
   };
   static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
+  {
+    using Type = PhysicalDeviceProperties2;
+  };
   using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
 
-  struct ImageFormatProperties2
+  struct QueryPoolPerformanceCreateInfoKHR
   {
-    operator VkImageFormatProperties2 const&() const
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t* pCounterIndices_ = {}) VULKAN_HPP_NOEXCEPT
+    : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( counterIndexCount_ ), pCounterIndices( pCounterIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageFormatProperties2*>(this);
+      *this = rhs;
     }
 
-    operator VkImageFormatProperties2 &()
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ )
+    : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) ), pCounterIndices( counterIndices_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageFormatProperties2*>(this);
-    }
-
-    bool operator==( ImageFormatProperties2 const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( imageFormatProperties == rhs.imageFormatProperties );
-    }
-
-    bool operator!=( ImageFormatProperties2 const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImageFormatProperties2;
-
-  public:
-    void* pNext = nullptr;
-    ImageFormatProperties imageFormatProperties;
-  };
-  static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
-
-  using ImageFormatProperties2KHR = ImageFormatProperties2;
-
-  struct PhysicalDeviceSparseImageFormatInfo2
-  {
-    PhysicalDeviceSparseImageFormatInfo2( Format format_ = Format::eUndefined,
-                                          ImageType type_ = ImageType::e1D,
-                                          SampleCountFlagBits samples_ = SampleCountFlagBits::e1,
-                                          ImageUsageFlags usage_ = ImageUsageFlags(),
-                                          ImageTiling tiling_ = ImageTiling::eOptimal )
-      : format( format_ )
-      , type( type_ )
-      , samples( samples_ )
-      , usage( usage_ )
-      , tiling( tiling_ )
-    {
-    }
-
-    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) );
-    }
-
-    PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
       return *this;
     }
-    PhysicalDeviceSparseImageFormatInfo2& setPNext( const void* pNext_ )
+
+    QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( QueryPoolPerformanceCreateInfoKHR ) );
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceSparseImageFormatInfo2& setFormat( Format format_ )
+    QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterIndexCount = counterIndexCount_;
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCounterIndices = pCounterIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    QueryPoolPerformanceCreateInfoKHR & setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
+      pCounterIndices = counterIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
+    }
+
+    operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( counterIndexCount == rhs.counterIndexCount )
+          && ( pCounterIndices == rhs.pCounterIndices );
+    }
+
+    bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+    const void* pNext = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t counterIndexCount = {};
+    const uint32_t* pCounterIndices = {};
+
+  };
+  static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
+  {
+    using Type = QueryPoolPerformanceCreateInfoKHR;
+  };
+
+  struct QueueFamilyProperties
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT
+    : queueFlags( queueFlags_ ), queueCount( queueCount_ ), timestampValidBits( timestampValidBits_ ), minImageTransferGranularity( minImageTransferGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
+      return *this;
+    }
+
+    QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( QueueFamilyProperties ) );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
+    }
+
+    operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyProperties const& ) const = default;
+#else
+    bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( queueFlags == rhs.queueFlags )
+          && ( queueCount == rhs.queueCount )
+          && ( timestampValidBits == rhs.timestampValidBits )
+          && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
+    }
+
+    bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
+    uint32_t queueCount = {};
+    uint32_t timestampValidBits = {};
+    VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
+
+  };
+  static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct QueueFamilyProperties2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : queueFamilyProperties( queueFamilyProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( QueueFamilyProperties2 ) );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyProperties2 const& ) const = default;
+#else
+    bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyProperties == rhs.queueFamilyProperties );
+    }
+
+    bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
+
+  };
+  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
+  {
+    using Type = QueueFamilyProperties2;
+  };
+  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
+
+  struct PhysicalDeviceSparseImageFormatInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal) VULKAN_HPP_NOEXCEPT
+    : format( format_ ), type( type_ ), samples( samples_ ), usage( usage_ ), tiling( tiling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) );
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
     {
       format = format_;
       return *this;
     }
 
-    PhysicalDeviceSparseImageFormatInfo2& setType( ImageType type_ )
+    PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
     {
       type = type_;
       return *this;
     }
 
-    PhysicalDeviceSparseImageFormatInfo2& setSamples( SampleCountFlagBits samples_ )
+    PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
     {
       samples = samples_;
       return *this;
     }
 
-    PhysicalDeviceSparseImageFormatInfo2& setUsage( ImageUsageFlags usage_ )
+    PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
     {
       usage = usage_;
       return *this;
     }
 
-    PhysicalDeviceSparseImageFormatInfo2& setTiling( ImageTiling tiling_ )
+    PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
     {
       tiling = tiling_;
       return *this;
     }
 
-    operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const
+
+    operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>(this);
+      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
     }
 
-    operator VkPhysicalDeviceSparseImageFormatInfo2 &()
+    operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>(this);
+      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
     }
 
-    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -27960,5668 +53473,237 @@
           && ( tiling == rhs.tiling );
     }
 
-    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const
+    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+
 
   public:
-    const void* pNext = nullptr;
-    Format format;
-    ImageType type;
-    SampleCountFlagBits samples;
-    ImageUsageFlags usage;
-    ImageTiling tiling;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+
   };
   static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
+  {
+    using Type = PhysicalDeviceSparseImageFormatInfo2;
+  };
   using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
 
-  struct SampleLocationsInfoEXT
+  struct SparseImageFormatProperties2
   {
-    SampleLocationsInfoEXT( SampleCountFlagBits sampleLocationsPerPixel_ = SampleCountFlagBits::e1,
-                            Extent2D sampleLocationGridSize_ = Extent2D(),
-                            uint32_t sampleLocationsCount_ = 0,
-                            const SampleLocationEXT* pSampleLocations_ = nullptr )
-      : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
-      , sampleLocationGridSize( sampleLocationGridSize_ )
-      , sampleLocationsCount( sampleLocationsCount_ )
-      , pSampleLocations( pSampleLocations_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
 
-    SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) );
-    }
-
-    SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) );
-      return *this;
-    }
-    SampleLocationsInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SampleLocationsInfoEXT& setSampleLocationsPerPixel( SampleCountFlagBits sampleLocationsPerPixel_ )
-    {
-      sampleLocationsPerPixel = sampleLocationsPerPixel_;
-      return *this;
-    }
-
-    SampleLocationsInfoEXT& setSampleLocationGridSize( Extent2D sampleLocationGridSize_ )
-    {
-      sampleLocationGridSize = sampleLocationGridSize_;
-      return *this;
-    }
-
-    SampleLocationsInfoEXT& setSampleLocationsCount( uint32_t sampleLocationsCount_ )
-    {
-      sampleLocationsCount = sampleLocationsCount_;
-      return *this;
-    }
-
-    SampleLocationsInfoEXT& setPSampleLocations( const SampleLocationEXT* pSampleLocations_ )
-    {
-      pSampleLocations = pSampleLocations_;
-      return *this;
-    }
-
-    operator VkSampleLocationsInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkSampleLocationsInfoEXT*>(this);
-    }
-
-    operator VkSampleLocationsInfoEXT &()
-    {
-      return *reinterpret_cast<VkSampleLocationsInfoEXT*>(this);
-    }
-
-    bool operator==( SampleLocationsInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
-          && ( sampleLocationGridSize == rhs.sampleLocationGridSize )
-          && ( sampleLocationsCount == rhs.sampleLocationsCount )
-          && ( pSampleLocations == rhs.pSampleLocations );
-    }
-
-    bool operator!=( SampleLocationsInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSampleLocationsInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    SampleCountFlagBits sampleLocationsPerPixel;
-    Extent2D sampleLocationGridSize;
-    uint32_t sampleLocationsCount;
-    const SampleLocationEXT* pSampleLocations;
-  };
-  static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
-
-  struct AttachmentSampleLocationsEXT
-  {
-    AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = 0,
-                                  SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() )
-      : attachmentIndex( attachmentIndex_ )
-      , sampleLocationsInfo( sampleLocationsInfo_ )
-    {
-    }
-
-    AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) );
-    }
-
-    AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) );
-      return *this;
-    }
-    AttachmentSampleLocationsEXT& setAttachmentIndex( uint32_t attachmentIndex_ )
-    {
-      attachmentIndex = attachmentIndex_;
-      return *this;
-    }
-
-    AttachmentSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ )
-    {
-      sampleLocationsInfo = sampleLocationsInfo_;
-      return *this;
-    }
-
-    operator VkAttachmentSampleLocationsEXT const&() const
-    {
-      return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>(this);
-    }
-
-    operator VkAttachmentSampleLocationsEXT &()
-    {
-      return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>(this);
-    }
-
-    bool operator==( AttachmentSampleLocationsEXT const& rhs ) const
-    {
-      return ( attachmentIndex == rhs.attachmentIndex )
-          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
-    }
-
-    bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t attachmentIndex;
-    SampleLocationsInfoEXT sampleLocationsInfo;
-  };
-  static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
-
-  struct SubpassSampleLocationsEXT
-  {
-    SubpassSampleLocationsEXT( uint32_t subpassIndex_ = 0,
-                               SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() )
-      : subpassIndex( subpassIndex_ )
-      , sampleLocationsInfo( sampleLocationsInfo_ )
-    {
-    }
-
-    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) );
-    }
-
-    SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) );
-      return *this;
-    }
-    SubpassSampleLocationsEXT& setSubpassIndex( uint32_t subpassIndex_ )
-    {
-      subpassIndex = subpassIndex_;
-      return *this;
-    }
-
-    SubpassSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ )
-    {
-      sampleLocationsInfo = sampleLocationsInfo_;
-      return *this;
-    }
-
-    operator VkSubpassSampleLocationsEXT const&() const
-    {
-      return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>(this);
-    }
-
-    operator VkSubpassSampleLocationsEXT &()
-    {
-      return *reinterpret_cast<VkSubpassSampleLocationsEXT*>(this);
-    }
-
-    bool operator==( SubpassSampleLocationsEXT const& rhs ) const
-    {
-      return ( subpassIndex == rhs.subpassIndex )
-          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
-    }
-
-    bool operator!=( SubpassSampleLocationsEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t subpassIndex;
-    SampleLocationsInfoEXT sampleLocationsInfo;
-  };
-  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
-
-  struct RenderPassSampleLocationsBeginInfoEXT
-  {
-    RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0,
-                                           const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr,
-                                           uint32_t postSubpassSampleLocationsCount_ = 0,
-                                           const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr )
-      : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
-      , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
-      , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
-      , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
-    {
-    }
-
-    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) );
-    }
-
-    RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) );
-      return *this;
-    }
-    RenderPassSampleLocationsBeginInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RenderPassSampleLocationsBeginInfoEXT& setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ )
-    {
-      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
-      return *this;
-    }
-
-    RenderPassSampleLocationsBeginInfoEXT& setPAttachmentInitialSampleLocations( const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ )
-    {
-      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
-      return *this;
-    }
-
-    RenderPassSampleLocationsBeginInfoEXT& setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ )
-    {
-      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
-      return *this;
-    }
-
-    RenderPassSampleLocationsBeginInfoEXT& setPPostSubpassSampleLocations( const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ )
-    {
-      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
-      return *this;
-    }
-
-    operator VkRenderPassSampleLocationsBeginInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>(this);
-    }
-
-    operator VkRenderPassSampleLocationsBeginInfoEXT &()
-    {
-      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>(this);
-    }
-
-    bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
-          && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
-          && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
-          && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
-    }
-
-    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t attachmentInitialSampleLocationsCount;
-    const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations;
-    uint32_t postSubpassSampleLocationsCount;
-    const SubpassSampleLocationsEXT* pPostSubpassSampleLocations;
-  };
-  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
-
-  struct PipelineSampleLocationsStateCreateInfoEXT
-  {
-    PipelineSampleLocationsStateCreateInfoEXT( Bool32 sampleLocationsEnable_ = 0,
-                                               SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() )
-      : sampleLocationsEnable( sampleLocationsEnable_ )
-      , sampleLocationsInfo( sampleLocationsInfo_ )
-    {
-    }
-
-    PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) );
-    }
-
-    PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) );
-      return *this;
-    }
-    PipelineSampleLocationsStateCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsEnable( Bool32 sampleLocationsEnable_ )
-    {
-      sampleLocationsEnable = sampleLocationsEnable_;
-      return *this;
-    }
-
-    PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ )
-    {
-      sampleLocationsInfo = sampleLocationsInfo_;
-      return *this;
-    }
-
-    operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>(this);
-    }
-
-    operator VkPipelineSampleLocationsStateCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>(this);
-    }
-
-    bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( sampleLocationsEnable == rhs.sampleLocationsEnable )
-          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
-    }
-
-    bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    Bool32 sampleLocationsEnable;
-    SampleLocationsInfoEXT sampleLocationsInfo;
-  };
-  static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceSampleLocationsPropertiesEXT
-  {
-    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>(this);
-    }
-
-    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>(this);
-    }
-
-    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
-          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
-          && ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 )
-          && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
-          && ( variableSampleLocations == rhs.variableSampleLocations );
-    }
-
-    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
-
-  public:
-    void* pNext = nullptr;
-    SampleCountFlags sampleLocationSampleCounts;
-    Extent2D maxSampleLocationGridSize;
-    float sampleLocationCoordinateRange[2];
-    uint32_t sampleLocationSubPixelBits;
-    Bool32 variableSampleLocations;
-  };
-  static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
-
-  enum class AttachmentDescriptionFlagBits
-  {
-    eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
-  };
-
-  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
-
-  VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
-  {
-    return AttachmentDescriptionFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
-  {
-    return ~( AttachmentDescriptionFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
-    };
-  };
-
-  struct AttachmentDescription
-  {
-    AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(),
-                           Format format_ = Format::eUndefined,
-                           SampleCountFlagBits samples_ = SampleCountFlagBits::e1,
-                           AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad,
-                           AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore,
-                           AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad,
-                           AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore,
-                           ImageLayout initialLayout_ = ImageLayout::eUndefined,
-                           ImageLayout finalLayout_ = ImageLayout::eUndefined )
-      : flags( flags_ )
-      , format( format_ )
-      , samples( samples_ )
-      , loadOp( loadOp_ )
-      , storeOp( storeOp_ )
-      , stencilLoadOp( stencilLoadOp_ )
-      , stencilStoreOp( stencilStoreOp_ )
-      , initialLayout( initialLayout_ )
-      , finalLayout( finalLayout_ )
-    {
-    }
-
-    AttachmentDescription( VkAttachmentDescription const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentDescription ) );
-    }
-
-    AttachmentDescription& operator=( VkAttachmentDescription const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentDescription ) );
-      return *this;
-    }
-    AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    AttachmentDescription& setFormat( Format format_ )
-    {
-      format = format_;
-      return *this;
-    }
-
-    AttachmentDescription& setSamples( SampleCountFlagBits samples_ )
-    {
-      samples = samples_;
-      return *this;
-    }
-
-    AttachmentDescription& setLoadOp( AttachmentLoadOp loadOp_ )
-    {
-      loadOp = loadOp_;
-      return *this;
-    }
-
-    AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ )
-    {
-      storeOp = storeOp_;
-      return *this;
-    }
-
-    AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ )
-    {
-      stencilLoadOp = stencilLoadOp_;
-      return *this;
-    }
-
-    AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ )
-    {
-      stencilStoreOp = stencilStoreOp_;
-      return *this;
-    }
-
-    AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ )
-    {
-      initialLayout = initialLayout_;
-      return *this;
-    }
-
-    AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ )
-    {
-      finalLayout = finalLayout_;
-      return *this;
-    }
-
-    operator VkAttachmentDescription const&() const
-    {
-      return *reinterpret_cast<const VkAttachmentDescription*>(this);
-    }
-
-    operator VkAttachmentDescription &()
-    {
-      return *reinterpret_cast<VkAttachmentDescription*>(this);
-    }
-
-    bool operator==( AttachmentDescription const& rhs ) const
-    {
-      return ( flags == rhs.flags )
-          && ( format == rhs.format )
-          && ( samples == rhs.samples )
-          && ( loadOp == rhs.loadOp )
-          && ( storeOp == rhs.storeOp )
-          && ( stencilLoadOp == rhs.stencilLoadOp )
-          && ( stencilStoreOp == rhs.stencilStoreOp )
-          && ( initialLayout == rhs.initialLayout )
-          && ( finalLayout == rhs.finalLayout );
-    }
-
-    bool operator!=( AttachmentDescription const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    AttachmentDescriptionFlags flags;
-    Format format;
-    SampleCountFlagBits samples;
-    AttachmentLoadOp loadOp;
-    AttachmentStoreOp storeOp;
-    AttachmentLoadOp stencilLoadOp;
-    AttachmentStoreOp stencilStoreOp;
-    ImageLayout initialLayout;
-    ImageLayout finalLayout;
-  };
-  static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
-
-  struct AttachmentDescription2KHR
-  {
-    AttachmentDescription2KHR( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(),
-                               Format format_ = Format::eUndefined,
-                               SampleCountFlagBits samples_ = SampleCountFlagBits::e1,
-                               AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad,
-                               AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore,
-                               AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad,
-                               AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore,
-                               ImageLayout initialLayout_ = ImageLayout::eUndefined,
-                               ImageLayout finalLayout_ = ImageLayout::eUndefined )
-      : flags( flags_ )
-      , format( format_ )
-      , samples( samples_ )
-      , loadOp( loadOp_ )
-      , storeOp( storeOp_ )
-      , stencilLoadOp( stencilLoadOp_ )
-      , stencilStoreOp( stencilStoreOp_ )
-      , initialLayout( initialLayout_ )
-      , finalLayout( finalLayout_ )
-    {
-    }
-
-    AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentDescription2KHR ) );
-    }
-
-    AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AttachmentDescription2KHR ) );
-      return *this;
-    }
-    AttachmentDescription2KHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setFlags( AttachmentDescriptionFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setFormat( Format format_ )
-    {
-      format = format_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setSamples( SampleCountFlagBits samples_ )
-    {
-      samples = samples_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setLoadOp( AttachmentLoadOp loadOp_ )
-    {
-      loadOp = loadOp_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setStoreOp( AttachmentStoreOp storeOp_ )
-    {
-      storeOp = storeOp_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ )
-    {
-      stencilLoadOp = stencilLoadOp_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ )
-    {
-      stencilStoreOp = stencilStoreOp_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setInitialLayout( ImageLayout initialLayout_ )
-    {
-      initialLayout = initialLayout_;
-      return *this;
-    }
-
-    AttachmentDescription2KHR& setFinalLayout( ImageLayout finalLayout_ )
-    {
-      finalLayout = finalLayout_;
-      return *this;
-    }
-
-    operator VkAttachmentDescription2KHR const&() const
-    {
-      return *reinterpret_cast<const VkAttachmentDescription2KHR*>(this);
-    }
-
-    operator VkAttachmentDescription2KHR &()
-    {
-      return *reinterpret_cast<VkAttachmentDescription2KHR*>(this);
-    }
-
-    bool operator==( AttachmentDescription2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( format == rhs.format )
-          && ( samples == rhs.samples )
-          && ( loadOp == rhs.loadOp )
-          && ( storeOp == rhs.storeOp )
-          && ( stencilLoadOp == rhs.stencilLoadOp )
-          && ( stencilStoreOp == rhs.stencilStoreOp )
-          && ( initialLayout == rhs.initialLayout )
-          && ( finalLayout == rhs.finalLayout );
-    }
-
-    bool operator!=( AttachmentDescription2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAttachmentDescription2KHR;
-
-  public:
-    const void* pNext = nullptr;
-    AttachmentDescriptionFlags flags;
-    Format format;
-    SampleCountFlagBits samples;
-    AttachmentLoadOp loadOp;
-    AttachmentStoreOp storeOp;
-    AttachmentLoadOp stencilLoadOp;
-    AttachmentStoreOp stencilStoreOp;
-    ImageLayout initialLayout;
-    ImageLayout finalLayout;
-  };
-  static_assert( sizeof( AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "struct and wrapper have different size!" );
-
-  enum class StencilFaceFlagBits
-  {
-    eFront = VK_STENCIL_FACE_FRONT_BIT,
-    eBack = VK_STENCIL_FACE_BACK_BIT,
-    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
-  };
-
-  using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
-
-  VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
-  {
-    return StencilFaceFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
-  {
-    return ~( StencilFaceFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<StencilFaceFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
-    };
-  };
-
-  enum class DescriptorPoolCreateFlagBits
-  {
-    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
-    eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
-  };
-
-  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
-
-  VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
-  {
-    return DescriptorPoolCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
-  {
-    return ~( DescriptorPoolCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT)
-    };
-  };
-
-  struct DescriptorPoolCreateInfo
-  {
-    DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(),
-                              uint32_t maxSets_ = 0,
-                              uint32_t poolSizeCount_ = 0,
-                              const DescriptorPoolSize* pPoolSizes_ = nullptr )
-      : flags( flags_ )
-      , maxSets( maxSets_ )
-      , poolSizeCount( poolSizeCount_ )
-      , pPoolSizes( pPoolSizes_ )
-    {
-    }
-
-    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) );
-    }
-
-    DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) );
-      return *this;
-    }
-    DescriptorPoolCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ )
-    {
-      maxSets = maxSets_;
-      return *this;
-    }
-
-    DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ )
-    {
-      poolSizeCount = poolSizeCount_;
-      return *this;
-    }
-
-    DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ )
-    {
-      pPoolSizes = pPoolSizes_;
-      return *this;
-    }
-
-    operator VkDescriptorPoolCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>(this);
-    }
-
-    operator VkDescriptorPoolCreateInfo &()
-    {
-      return *reinterpret_cast<VkDescriptorPoolCreateInfo*>(this);
-    }
-
-    bool operator==( DescriptorPoolCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( maxSets == rhs.maxSets )
-          && ( poolSizeCount == rhs.poolSizeCount )
-          && ( pPoolSizes == rhs.pPoolSizes );
-    }
-
-    bool operator!=( DescriptorPoolCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorPoolCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    DescriptorPoolCreateFlags flags;
-    uint32_t maxSets;
-    uint32_t poolSizeCount;
-    const DescriptorPoolSize* pPoolSizes;
-  };
-  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
-
-  enum class DependencyFlagBits
-  {
-    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
-    eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
-    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
-    eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
-    eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT
-  };
-
-  using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
-
-  VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
-  {
-    return DependencyFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
-  {
-    return ~( DependencyFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<DependencyFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal)
-    };
-  };
-
-  struct SubpassDependency
-  {
-    SubpassDependency( uint32_t srcSubpass_ = 0,
-                       uint32_t dstSubpass_ = 0,
-                       PipelineStageFlags srcStageMask_ = PipelineStageFlags(),
-                       PipelineStageFlags dstStageMask_ = PipelineStageFlags(),
-                       AccessFlags srcAccessMask_ = AccessFlags(),
-                       AccessFlags dstAccessMask_ = AccessFlags(),
-                       DependencyFlags dependencyFlags_ = DependencyFlags() )
-      : srcSubpass( srcSubpass_ )
-      , dstSubpass( dstSubpass_ )
-      , srcStageMask( srcStageMask_ )
-      , dstStageMask( dstStageMask_ )
-      , srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-      , dependencyFlags( dependencyFlags_ )
-    {
-    }
-
-    SubpassDependency( VkSubpassDependency const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassDependency ) );
-    }
-
-    SubpassDependency& operator=( VkSubpassDependency const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassDependency ) );
-      return *this;
-    }
-    SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
-    {
-      srcSubpass = srcSubpass_;
-      return *this;
-    }
-
-    SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
-    {
-      dstSubpass = dstSubpass_;
-      return *this;
-    }
-
-    SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
-    {
-      srcStageMask = srcStageMask_;
-      return *this;
-    }
-
-    SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
-    {
-      dstStageMask = dstStageMask_;
-      return *this;
-    }
-
-    SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
-    {
-      dependencyFlags = dependencyFlags_;
-      return *this;
-    }
-
-    operator VkSubpassDependency const&() const
-    {
-      return *reinterpret_cast<const VkSubpassDependency*>(this);
-    }
-
-    operator VkSubpassDependency &()
-    {
-      return *reinterpret_cast<VkSubpassDependency*>(this);
-    }
-
-    bool operator==( SubpassDependency const& rhs ) const
-    {
-      return ( srcSubpass == rhs.srcSubpass )
-          && ( dstSubpass == rhs.dstSubpass )
-          && ( srcStageMask == rhs.srcStageMask )
-          && ( dstStageMask == rhs.dstStageMask )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( dependencyFlags == rhs.dependencyFlags );
-    }
-
-    bool operator!=( SubpassDependency const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t srcSubpass;
-    uint32_t dstSubpass;
-    PipelineStageFlags srcStageMask;
-    PipelineStageFlags dstStageMask;
-    AccessFlags srcAccessMask;
-    AccessFlags dstAccessMask;
-    DependencyFlags dependencyFlags;
-  };
-  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
-
-  struct SubpassDependency2KHR
-  {
-    SubpassDependency2KHR( uint32_t srcSubpass_ = 0,
-                           uint32_t dstSubpass_ = 0,
-                           PipelineStageFlags srcStageMask_ = PipelineStageFlags(),
-                           PipelineStageFlags dstStageMask_ = PipelineStageFlags(),
-                           AccessFlags srcAccessMask_ = AccessFlags(),
-                           AccessFlags dstAccessMask_ = AccessFlags(),
-                           DependencyFlags dependencyFlags_ = DependencyFlags(),
-                           int32_t viewOffset_ = 0 )
-      : srcSubpass( srcSubpass_ )
-      , dstSubpass( dstSubpass_ )
-      , srcStageMask( srcStageMask_ )
-      , dstStageMask( dstStageMask_ )
-      , srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-      , dependencyFlags( dependencyFlags_ )
-      , viewOffset( viewOffset_ )
-    {
-    }
-
-    SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassDependency2KHR ) );
-    }
-
-    SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassDependency2KHR ) );
-      return *this;
-    }
-    SubpassDependency2KHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setSrcSubpass( uint32_t srcSubpass_ )
-    {
-      srcSubpass = srcSubpass_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setDstSubpass( uint32_t dstSubpass_ )
-    {
-      dstSubpass = dstSubpass_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setSrcStageMask( PipelineStageFlags srcStageMask_ )
-    {
-      srcStageMask = srcStageMask_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setDstStageMask( PipelineStageFlags dstStageMask_ )
-    {
-      dstStageMask = dstStageMask_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setSrcAccessMask( AccessFlags srcAccessMask_ )
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setDstAccessMask( AccessFlags dstAccessMask_ )
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setDependencyFlags( DependencyFlags dependencyFlags_ )
-    {
-      dependencyFlags = dependencyFlags_;
-      return *this;
-    }
-
-    SubpassDependency2KHR& setViewOffset( int32_t viewOffset_ )
-    {
-      viewOffset = viewOffset_;
-      return *this;
-    }
-
-    operator VkSubpassDependency2KHR const&() const
-    {
-      return *reinterpret_cast<const VkSubpassDependency2KHR*>(this);
-    }
-
-    operator VkSubpassDependency2KHR &()
-    {
-      return *reinterpret_cast<VkSubpassDependency2KHR*>(this);
-    }
-
-    bool operator==( SubpassDependency2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcSubpass == rhs.srcSubpass )
-          && ( dstSubpass == rhs.dstSubpass )
-          && ( srcStageMask == rhs.srcStageMask )
-          && ( dstStageMask == rhs.dstStageMask )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( dependencyFlags == rhs.dependencyFlags )
-          && ( viewOffset == rhs.viewOffset );
-    }
-
-    bool operator!=( SubpassDependency2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSubpassDependency2KHR;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t srcSubpass;
-    uint32_t dstSubpass;
-    PipelineStageFlags srcStageMask;
-    PipelineStageFlags dstStageMask;
-    AccessFlags srcAccessMask;
-    AccessFlags dstAccessMask;
-    DependencyFlags dependencyFlags;
-    int32_t viewOffset;
-  };
-  static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "struct and wrapper have different size!" );
-
-  enum class PresentModeKHR
-  {
-    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
-    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
-    eFifo = VK_PRESENT_MODE_FIFO_KHR,
-    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
-    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
-    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
-  };
-
-  enum class ColorSpaceKHR
-  {
-    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
-    eVkColorspaceSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
-    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
-    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
-    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
-    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
-    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
-    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
-    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
-    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
-    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
-    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
-    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
-    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
-    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
-    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT
-  };
-
-  struct SurfaceFormatKHR
-  {
-    operator VkSurfaceFormatKHR const&() const
-    {
-      return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
-    }
-
-    operator VkSurfaceFormatKHR &()
-    {
-      return *reinterpret_cast<VkSurfaceFormatKHR*>(this);
-    }
-
-    bool operator==( SurfaceFormatKHR const& rhs ) const
-    {
-      return ( format == rhs.format )
-          && ( colorSpace == rhs.colorSpace );
-    }
-
-    bool operator!=( SurfaceFormatKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    Format format;
-    ColorSpaceKHR colorSpace;
-  };
-  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
-
-  struct SurfaceFormat2KHR
-  {
-    operator VkSurfaceFormat2KHR const&() const
-    {
-      return *reinterpret_cast<const VkSurfaceFormat2KHR*>(this);
-    }
-
-    operator VkSurfaceFormat2KHR &()
-    {
-      return *reinterpret_cast<VkSurfaceFormat2KHR*>(this);
-    }
-
-    bool operator==( SurfaceFormat2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( surfaceFormat == rhs.surfaceFormat );
-    }
-
-    bool operator!=( SurfaceFormat2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSurfaceFormat2KHR;
-
-  public:
-    void* pNext = nullptr;
-    SurfaceFormatKHR surfaceFormat;
-  };
-  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
-
-  enum class DisplayPlaneAlphaFlagBitsKHR
-  {
-    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
-    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
-    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
-    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
-  };
-
-  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
-
-  VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
-  {
-    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
-  {
-    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
-  }
-
-  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
-  {
-    enum
-    {
-      allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
-    };
-  };
-
-  struct DisplayPlaneCapabilitiesKHR
-  {
-    operator VkDisplayPlaneCapabilitiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
-    }
-
-    operator VkDisplayPlaneCapabilitiesKHR &()
-    {
-      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>(this);
-    }
-
-    bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
-    {
-      return ( supportedAlpha == rhs.supportedAlpha )
-          && ( minSrcPosition == rhs.minSrcPosition )
-          && ( maxSrcPosition == rhs.maxSrcPosition )
-          && ( minSrcExtent == rhs.minSrcExtent )
-          && ( maxSrcExtent == rhs.maxSrcExtent )
-          && ( minDstPosition == rhs.minDstPosition )
-          && ( maxDstPosition == rhs.maxDstPosition )
-          && ( minDstExtent == rhs.minDstExtent )
-          && ( maxDstExtent == rhs.maxDstExtent );
-    }
-
-    bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DisplayPlaneAlphaFlagsKHR supportedAlpha;
-    Offset2D minSrcPosition;
-    Offset2D maxSrcPosition;
-    Extent2D minSrcExtent;
-    Extent2D maxSrcExtent;
-    Offset2D minDstPosition;
-    Offset2D maxDstPosition;
-    Extent2D minDstExtent;
-    Extent2D maxDstExtent;
-  };
-  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
-
-  struct DisplayPlaneCapabilities2KHR
-  {
-    operator VkDisplayPlaneCapabilities2KHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>(this);
-    }
-
-    operator VkDisplayPlaneCapabilities2KHR &()
-    {
-      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>(this);
-    }
-
-    bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( capabilities == rhs.capabilities );
-    }
-
-    bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
-
-  public:
-    void* pNext = nullptr;
-    DisplayPlaneCapabilitiesKHR capabilities;
-  };
-  static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
-
-  enum class CompositeAlphaFlagBitsKHR
-  {
-    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
-    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
-    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
-    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
-  };
-
-  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
-
-  VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
-  {
-    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
-  {
-    return ~( CompositeAlphaFlagsKHR( bits ) );
-  }
-
-  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
-  {
-    enum
-    {
-      allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
-    };
-  };
-
-  enum class SurfaceTransformFlagBitsKHR
-  {
-    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
-    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
-    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
-    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
-    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
-    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
-    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
-    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
-    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
-  };
-
-  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
-
-  VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
-  {
-    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
-  {
-    return ~( SurfaceTransformFlagsKHR( bits ) );
-  }
-
-  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
-  {
-    enum
-    {
-      allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
-    };
-  };
-
-  struct DisplayPropertiesKHR
-  {
-    operator VkDisplayPropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
-    }
-
-    operator VkDisplayPropertiesKHR &()
-    {
-      return *reinterpret_cast<VkDisplayPropertiesKHR*>(this);
-    }
-
-    bool operator==( DisplayPropertiesKHR const& rhs ) const
-    {
-      return ( display == rhs.display )
-          && ( displayName == rhs.displayName )
-          && ( physicalDimensions == rhs.physicalDimensions )
-          && ( physicalResolution == rhs.physicalResolution )
-          && ( supportedTransforms == rhs.supportedTransforms )
-          && ( planeReorderPossible == rhs.planeReorderPossible )
-          && ( persistentContent == rhs.persistentContent );
-    }
-
-    bool operator!=( DisplayPropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    DisplayKHR display;
-    const char* displayName;
-    Extent2D physicalDimensions;
-    Extent2D physicalResolution;
-    SurfaceTransformFlagsKHR supportedTransforms;
-    Bool32 planeReorderPossible;
-    Bool32 persistentContent;
-  };
-  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
-
-  struct DisplaySurfaceCreateInfoKHR
-  {
-    DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(),
-                                 DisplayModeKHR displayMode_ = DisplayModeKHR(),
-                                 uint32_t planeIndex_ = 0,
-                                 uint32_t planeStackIndex_ = 0,
-                                 SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity,
-                                 float globalAlpha_ = 0,
-                                 DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque,
-                                 Extent2D imageExtent_ = Extent2D() )
-      : flags( flags_ )
-      , displayMode( displayMode_ )
-      , planeIndex( planeIndex_ )
-      , planeStackIndex( planeStackIndex_ )
-      , transform( transform_ )
-      , globalAlpha( globalAlpha_ )
-      , alphaMode( alphaMode_ )
-      , imageExtent( imageExtent_ )
-    {
-    }
-
-    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
-    }
-
-    DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
-      return *this;
-    }
-    DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
-    {
-      displayMode = displayMode_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
-    {
-      planeIndex = planeIndex_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
-    {
-      planeStackIndex = planeStackIndex_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
-    {
-      transform = transform_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
-    {
-      globalAlpha = globalAlpha_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
-    {
-      alphaMode = alphaMode_;
-      return *this;
-    }
-
-    DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
-    {
-      imageExtent = imageExtent_;
-      return *this;
-    }
-
-    operator VkDisplaySurfaceCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
-    }
-
-    operator VkDisplaySurfaceCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>(this);
-    }
-
-    bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( displayMode == rhs.displayMode )
-          && ( planeIndex == rhs.planeIndex )
-          && ( planeStackIndex == rhs.planeStackIndex )
-          && ( transform == rhs.transform )
-          && ( globalAlpha == rhs.globalAlpha )
-          && ( alphaMode == rhs.alphaMode )
-          && ( imageExtent == rhs.imageExtent );
-    }
-
-    bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    DisplaySurfaceCreateFlagsKHR flags;
-    DisplayModeKHR displayMode;
-    uint32_t planeIndex;
-    uint32_t planeStackIndex;
-    SurfaceTransformFlagBitsKHR transform;
-    float globalAlpha;
-    DisplayPlaneAlphaFlagBitsKHR alphaMode;
-    Extent2D imageExtent;
-  };
-  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
-
-  struct SurfaceCapabilitiesKHR
-  {
-    operator VkSurfaceCapabilitiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
-    }
-
-    operator VkSurfaceCapabilitiesKHR &()
-    {
-      return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>(this);
-    }
-
-    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
-    {
-      return ( minImageCount == rhs.minImageCount )
-          && ( maxImageCount == rhs.maxImageCount )
-          && ( currentExtent == rhs.currentExtent )
-          && ( minImageExtent == rhs.minImageExtent )
-          && ( maxImageExtent == rhs.maxImageExtent )
-          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
-          && ( supportedTransforms == rhs.supportedTransforms )
-          && ( currentTransform == rhs.currentTransform )
-          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
-          && ( supportedUsageFlags == rhs.supportedUsageFlags );
-    }
-
-    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t minImageCount;
-    uint32_t maxImageCount;
-    Extent2D currentExtent;
-    Extent2D minImageExtent;
-    Extent2D maxImageExtent;
-    uint32_t maxImageArrayLayers;
-    SurfaceTransformFlagsKHR supportedTransforms;
-    SurfaceTransformFlagBitsKHR currentTransform;
-    CompositeAlphaFlagsKHR supportedCompositeAlpha;
-    ImageUsageFlags supportedUsageFlags;
-  };
-  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
-
-  struct SurfaceCapabilities2KHR
-  {
-    operator VkSurfaceCapabilities2KHR const&() const
-    {
-      return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>(this);
-    }
-
-    operator VkSurfaceCapabilities2KHR &()
-    {
-      return *reinterpret_cast<VkSurfaceCapabilities2KHR*>(this);
-    }
-
-    bool operator==( SurfaceCapabilities2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( surfaceCapabilities == rhs.surfaceCapabilities );
-    }
-
-    bool operator!=( SurfaceCapabilities2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSurfaceCapabilities2KHR;
-
-  public:
-    void* pNext = nullptr;
-    SurfaceCapabilitiesKHR surfaceCapabilities;
-  };
-  static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
-
-  struct DisplayProperties2KHR
-  {
-    operator VkDisplayProperties2KHR const&() const
-    {
-      return *reinterpret_cast<const VkDisplayProperties2KHR*>(this);
-    }
-
-    operator VkDisplayProperties2KHR &()
-    {
-      return *reinterpret_cast<VkDisplayProperties2KHR*>(this);
-    }
-
-    bool operator==( DisplayProperties2KHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( displayProperties == rhs.displayProperties );
-    }
-
-    bool operator!=( DisplayProperties2KHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayProperties2KHR;
-
-  public:
-    void* pNext = nullptr;
-    DisplayPropertiesKHR displayProperties;
-  };
-  static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
-
-  enum class TimeDomainEXT
-  {
-    eDevice = VK_TIME_DOMAIN_DEVICE_EXT,
-    eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
-    eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
-    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
-  };
-
-  struct CalibratedTimestampInfoEXT
-  {
-    CalibratedTimestampInfoEXT( TimeDomainEXT timeDomain_ = TimeDomainEXT::eDevice )
-      : timeDomain( timeDomain_ )
-    {
-    }
-
-    CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CalibratedTimestampInfoEXT ) );
-    }
-
-    CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CalibratedTimestampInfoEXT ) );
-      return *this;
-    }
-    CalibratedTimestampInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CalibratedTimestampInfoEXT& setTimeDomain( TimeDomainEXT timeDomain_ )
-    {
-      timeDomain = timeDomain_;
-      return *this;
-    }
-
-    operator VkCalibratedTimestampInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>(this);
-    }
-
-    operator VkCalibratedTimestampInfoEXT &()
-    {
-      return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>(this);
-    }
-
-    bool operator==( CalibratedTimestampInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( timeDomain == rhs.timeDomain );
-    }
-
-    bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    TimeDomainEXT timeDomain;
-  };
-  static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class DebugReportFlagBitsEXT
-  {
-    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
-    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
-    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
-    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
-    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
-  };
-
-  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
-
-  VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
-  {
-    return DebugReportFlagsEXT( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
-  {
-    return ~( DebugReportFlagsEXT( bits ) );
-  }
-
-  template <> struct FlagTraits<DebugReportFlagBitsEXT>
-  {
-    enum
-    {
-      allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
-    };
-  };
-
-  struct DebugReportCallbackCreateInfoEXT
-  {
-    DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(),
-                                      PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr,
-                                      void* pUserData_ = nullptr )
-      : flags( flags_ )
-      , pfnCallback( pfnCallback_ )
-      , pUserData( pUserData_ )
-    {
-    }
-
-    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
-    }
-
-    DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
-      return *this;
-    }
-    DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
-    {
-      pfnCallback = pfnCallback_;
-      return *this;
-    }
-
-    DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
-    {
-      pUserData = pUserData_;
-      return *this;
-    }
-
-    operator VkDebugReportCallbackCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
-    }
-
-    operator VkDebugReportCallbackCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>(this);
-    }
-
-    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pfnCallback == rhs.pfnCallback )
-          && ( pUserData == rhs.pUserData );
-    }
-
-    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    DebugReportFlagsEXT flags;
-    PFN_vkDebugReportCallbackEXT pfnCallback;
-    void* pUserData;
-  };
-  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class DebugReportObjectTypeEXT
-  {
-    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
-    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
-    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
-    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
-    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
-    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
-    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
-    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
-    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
-    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
-    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
-    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
-    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
-    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
-    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
-    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
-    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
-    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
-    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
-    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
-    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
-    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
-    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
-    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
-    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
-    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
-    eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
-    eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
-    eDebugReportCallbackExt = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
-    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
-    eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
-    eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
-    eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
-    eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,
-    eValidationCacheExt = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
-    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
-    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
-    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
-    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
-    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
-    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT
-  };
-
-  struct DebugMarkerObjectNameInfoEXT
-  {
-    DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown,
-                                  uint64_t object_ = 0,
-                                  const char* pObjectName_ = nullptr )
-      : objectType( objectType_ )
-      , object( object_ )
-      , pObjectName( pObjectName_ )
-    {
-    }
-
-    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
-    }
-
-    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
-      return *this;
-    }
-    DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
-    {
-      objectType = objectType_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
-    {
-      object = object_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
-    {
-      pObjectName = pObjectName_;
-      return *this;
-    }
-
-    operator VkDebugMarkerObjectNameInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
-    }
-
-    operator VkDebugMarkerObjectNameInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>(this);
-    }
-
-    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( object == rhs.object )
-          && ( pObjectName == rhs.pObjectName );
-    }
-
-    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    DebugReportObjectTypeEXT objectType;
-    uint64_t object;
-    const char* pObjectName;
-  };
-  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
-
-  struct DebugMarkerObjectTagInfoEXT
-  {
-    DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown,
-                                 uint64_t object_ = 0,
-                                 uint64_t tagName_ = 0,
-                                 size_t tagSize_ = 0,
-                                 const void* pTag_ = nullptr )
-      : objectType( objectType_ )
-      , object( object_ )
-      , tagName( tagName_ )
-      , tagSize( tagSize_ )
-      , pTag( pTag_ )
-    {
-    }
-
-    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
-    }
-
-    DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
-      return *this;
-    }
-    DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
-    {
-      objectType = objectType_;
-      return *this;
-    }
-
-    DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
-    {
-      object = object_;
-      return *this;
-    }
-
-    DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
-    {
-      tagName = tagName_;
-      return *this;
-    }
-
-    DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
-    {
-      tagSize = tagSize_;
-      return *this;
-    }
-
-    DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
-    {
-      pTag = pTag_;
-      return *this;
-    }
-
-    operator VkDebugMarkerObjectTagInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
-    }
-
-    operator VkDebugMarkerObjectTagInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>(this);
-    }
-
-    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( object == rhs.object )
-          && ( tagName == rhs.tagName )
-          && ( tagSize == rhs.tagSize )
-          && ( pTag == rhs.pTag );
-    }
-
-    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    DebugReportObjectTypeEXT objectType;
-    uint64_t object;
-    uint64_t tagName;
-    size_t tagSize;
-    const void* pTag;
-  };
-  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class RasterizationOrderAMD
-  {
-    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
-    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
-  };
-
-  struct PipelineRasterizationStateRasterizationOrderAMD
-  {
-    PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
-      : rasterizationOrder( rasterizationOrder_ )
-    {
-    }
-
-    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
-    }
-
-    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
-      return *this;
-    }
-    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
-    {
-      rasterizationOrder = rasterizationOrder_;
-      return *this;
-    }
-
-    operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const
-    {
-      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
-    }
-
-    operator VkPipelineRasterizationStateRasterizationOrderAMD &()
-    {
-      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
-    }
-
-    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( rasterizationOrder == rhs.rasterizationOrder );
-    }
-
-    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
-
-  public:
-    const void* pNext = nullptr;
-    RasterizationOrderAMD rasterizationOrder;
-  };
-  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
-
-  enum class ExternalMemoryHandleTypeFlagBitsNV
-  {
-    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
-    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
-    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
-    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
-  };
-
-  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
-
-  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
-  {
-    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
-  {
-    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
-    };
-  };
-
-  struct ExternalMemoryImageCreateInfoNV
-  {
-    ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
-      : handleTypes( handleTypes_ )
-    {
-    }
-
-    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
-    }
-
-    ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
-      return *this;
-    }
-    ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExternalMemoryImageCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
-    }
-
-    operator VkExternalMemoryImageCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>(this);
-    }
-
-    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlagsNV handleTypes;
-  };
-  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
-
-  struct ExportMemoryAllocateInfoNV
-  {
-    ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
-      : handleTypes( handleTypes_ )
-    {
-    }
-
-    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
-    }
-
-    ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
-      return *this;
-    }
-    ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExportMemoryAllocateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
-    }
-
-    operator VkExportMemoryAllocateInfoNV &()
-    {
-      return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>(this);
-    }
-
-    bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlagsNV handleTypes;
-  };
-  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
-
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  struct ImportMemoryWin32HandleInfoNV
-  {
-    ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(),
-                                   HANDLE handle_ = 0 )
-      : handleType( handleType_ )
-      , handle( handle_ )
-    {
-    }
-
-    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
-    }
-
-    ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
-      return *this;
-    }
-    ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
-    {
-      handle = handle_;
-      return *this;
-    }
-
-    operator VkImportMemoryWin32HandleInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
-    }
-
-    operator VkImportMemoryWin32HandleInfoNV &()
-    {
-      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>(this);
-    }
-
-    bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType )
-          && ( handle == rhs.handle );
-    }
-
-    bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlagsNV handleType;
-    HANDLE handle;
-  };
-  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
-
-  enum class ExternalMemoryFeatureFlagBitsNV
-  {
-    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
-    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
-    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
-  };
-
-  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
-
-  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
-  {
-    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
-  {
-    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
-    };
-  };
-
-  struct ExternalImageFormatPropertiesNV
-  {
-    operator VkExternalImageFormatPropertiesNV const&() const
-    {
-      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
-    }
-
-    operator VkExternalImageFormatPropertiesNV &()
-    {
-      return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>(this);
-    }
-
-    bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
-    {
-      return ( imageFormatProperties == rhs.imageFormatProperties )
-          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
-          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
-          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
-    }
-
-    bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ImageFormatProperties imageFormatProperties;
-    ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
-    ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
-    ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
-  };
-  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
-
-  enum class ValidationCheckEXT
-  {
-    eAll = VK_VALIDATION_CHECK_ALL_EXT,
-    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
-  };
-
-  struct ValidationFlagsEXT
-  {
-    ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0,
-                        const ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
-      : disabledValidationCheckCount( disabledValidationCheckCount_ )
-      , pDisabledValidationChecks( pDisabledValidationChecks_ )
-    {
-    }
-
-    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) );
-    }
-
-    ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) );
-      return *this;
-    }
-    ValidationFlagsEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
-    {
-      disabledValidationCheckCount = disabledValidationCheckCount_;
-      return *this;
-    }
-
-    ValidationFlagsEXT& setPDisabledValidationChecks( const ValidationCheckEXT* pDisabledValidationChecks_ )
-    {
-      pDisabledValidationChecks = pDisabledValidationChecks_;
-      return *this;
-    }
-
-    operator VkValidationFlagsEXT const&() const
-    {
-      return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
-    }
-
-    operator VkValidationFlagsEXT &()
-    {
-      return *reinterpret_cast<VkValidationFlagsEXT*>(this);
-    }
-
-    bool operator==( ValidationFlagsEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
-          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
-    }
-
-    bool operator!=( ValidationFlagsEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eValidationFlagsEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t disabledValidationCheckCount;
-    const ValidationCheckEXT* pDisabledValidationChecks;
-  };
-  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
-
-  enum class SubgroupFeatureFlagBits
-  {
-    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
-    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
-    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
-    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
-    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
-    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
-    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
-    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
-    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
-  };
-
-  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits, VkSubgroupFeatureFlags>;
-
-  VULKAN_HPP_INLINE SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 )
-  {
-    return SubgroupFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits )
-  {
-    return ~( SubgroupFeatureFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<SubgroupFeatureFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
-    };
-  };
-
-  struct PhysicalDeviceSubgroupProperties
-  {
-    operator VkPhysicalDeviceSubgroupProperties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>(this);
-    }
-
-    operator VkPhysicalDeviceSubgroupProperties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( subgroupSize == rhs.subgroupSize )
-          && ( supportedStages == rhs.supportedStages )
-          && ( supportedOperations == rhs.supportedOperations )
-          && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
-    }
-
-    bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t subgroupSize;
-    ShaderStageFlags supportedStages;
-    SubgroupFeatureFlags supportedOperations;
-    Bool32 quadOperationsInAllStages;
-  };
-  static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
-
-  enum class IndirectCommandsLayoutUsageFlagBitsNVX
-  {
-    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
-    eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
-    eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
-    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
-  };
-
-  using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
-
-  VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
-  {
-    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
-  {
-    return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
-  }
-
-  template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
-  {
-    enum
-    {
-      allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
-    };
-  };
-
-  enum class ObjectEntryUsageFlagBitsNVX
-  {
-    eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
-    eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
-  };
-
-  using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
-
-  VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
-  {
-    return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
-  {
-    return ~( ObjectEntryUsageFlagsNVX( bits ) );
-  }
-
-  template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
-  {
-    enum
-    {
-      allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
-    };
-  };
-
-  enum class IndirectCommandsTokenTypeNVX
-  {
-    ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
-    eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX,
-    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX,
-    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX,
-    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX,
-    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX,
-    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX,
-    eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX
-  };
-
-  struct IndirectCommandsTokenNVX
-  {
-    IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline,
-                              Buffer buffer_ = Buffer(),
-                              DeviceSize offset_ = 0 )
-      : tokenType( tokenType_ )
-      , buffer( buffer_ )
-      , offset( offset_ )
-    {
-    }
-
-    IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) );
-    }
-
-    IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) );
-      return *this;
-    }
-    IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
-    {
-      tokenType = tokenType_;
-      return *this;
-    }
-
-    IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    operator VkIndirectCommandsTokenNVX const&() const
-    {
-      return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
-    }
-
-    operator VkIndirectCommandsTokenNVX &()
-    {
-      return *reinterpret_cast<VkIndirectCommandsTokenNVX*>(this);
-    }
-
-    bool operator==( IndirectCommandsTokenNVX const& rhs ) const
-    {
-      return ( tokenType == rhs.tokenType )
-          && ( buffer == rhs.buffer )
-          && ( offset == rhs.offset );
-    }
-
-    bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    IndirectCommandsTokenTypeNVX tokenType;
-    Buffer buffer;
-    DeviceSize offset;
-  };
-  static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
-
-  struct IndirectCommandsLayoutTokenNVX
-  {
-    IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline,
-                                    uint32_t bindingUnit_ = 0,
-                                    uint32_t dynamicCount_ = 0,
-                                    uint32_t divisor_ = 0 )
-      : tokenType( tokenType_ )
-      , bindingUnit( bindingUnit_ )
-      , dynamicCount( dynamicCount_ )
-      , divisor( divisor_ )
-    {
-    }
-
-    IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) );
-    }
-
-    IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) );
-      return *this;
-    }
-    IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
-    {
-      tokenType = tokenType_;
-      return *this;
-    }
-
-    IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
-    {
-      bindingUnit = bindingUnit_;
-      return *this;
-    }
-
-    IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
-    {
-      dynamicCount = dynamicCount_;
-      return *this;
-    }
-
-    IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
-    {
-      divisor = divisor_;
-      return *this;
-    }
-
-    operator VkIndirectCommandsLayoutTokenNVX const&() const
-    {
-      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
-    }
-
-    operator VkIndirectCommandsLayoutTokenNVX &()
-    {
-      return *reinterpret_cast<VkIndirectCommandsLayoutTokenNVX*>(this);
-    }
-
-    bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
-    {
-      return ( tokenType == rhs.tokenType )
-          && ( bindingUnit == rhs.bindingUnit )
-          && ( dynamicCount == rhs.dynamicCount )
-          && ( divisor == rhs.divisor );
-    }
-
-    bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    IndirectCommandsTokenTypeNVX tokenType;
-    uint32_t bindingUnit;
-    uint32_t dynamicCount;
-    uint32_t divisor;
-  };
-  static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
-
-  struct IndirectCommandsLayoutCreateInfoNVX
-  {
-    IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics,
-                                         IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(),
-                                         uint32_t tokenCount_ = 0,
-                                         const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
-      : pipelineBindPoint( pipelineBindPoint_ )
-      , flags( flags_ )
-      , tokenCount( tokenCount_ )
-      , pTokens( pTokens_ )
-    {
-    }
-
-    IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) );
-    }
-
-    IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) );
-      return *this;
-    }
-    IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
-    {
-      pipelineBindPoint = pipelineBindPoint_;
-      return *this;
-    }
-
-    IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
-    {
-      tokenCount = tokenCount_;
-      return *this;
-    }
-
-    IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
-    {
-      pTokens = pTokens_;
-      return *this;
-    }
-
-    operator VkIndirectCommandsLayoutCreateInfoNVX const&() const
-    {
-      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
-    }
-
-    operator VkIndirectCommandsLayoutCreateInfoNVX &()
-    {
-      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX*>(this);
-    }
-
-    bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pipelineBindPoint == rhs.pipelineBindPoint )
-          && ( flags == rhs.flags )
-          && ( tokenCount == rhs.tokenCount )
-          && ( pTokens == rhs.pTokens );
-    }
-
-    bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX;
-
-  public:
-    const void* pNext = nullptr;
-    PipelineBindPoint pipelineBindPoint;
-    IndirectCommandsLayoutUsageFlagsNVX flags;
-    uint32_t tokenCount;
-    const IndirectCommandsLayoutTokenNVX* pTokens;
-  };
-  static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
-
-  enum class ObjectEntryTypeNVX
-  {
-    eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
-    ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX,
-    eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX,
-    eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX,
-    ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX
-  };
-
-  struct ObjectTableCreateInfoNVX
-  {
-    ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0,
-                              const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr,
-                              const uint32_t* pObjectEntryCounts_ = nullptr,
-                              const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr,
-                              uint32_t maxUniformBuffersPerDescriptor_ = 0,
-                              uint32_t maxStorageBuffersPerDescriptor_ = 0,
-                              uint32_t maxStorageImagesPerDescriptor_ = 0,
-                              uint32_t maxSampledImagesPerDescriptor_ = 0,
-                              uint32_t maxPipelineLayouts_ = 0 )
-      : objectCount( objectCount_ )
-      , pObjectEntryTypes( pObjectEntryTypes_ )
-      , pObjectEntryCounts( pObjectEntryCounts_ )
-      , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
-      , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
-      , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
-      , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
-      , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
-      , maxPipelineLayouts( maxPipelineLayouts_ )
-    {
-    }
-
-    ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) );
-    }
-
-    ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) );
-      return *this;
-    }
-    ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
-    {
-      objectCount = objectCount_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
-    {
-      pObjectEntryTypes = pObjectEntryTypes_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
-    {
-      pObjectEntryCounts = pObjectEntryCounts_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
-    {
-      pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
-    {
-      maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
-    {
-      maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
-    {
-      maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
-    {
-      maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
-      return *this;
-    }
-
-    ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
-    {
-      maxPipelineLayouts = maxPipelineLayouts_;
-      return *this;
-    }
-
-    operator VkObjectTableCreateInfoNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
-    }
-
-    operator VkObjectTableCreateInfoNVX &()
-    {
-      return *reinterpret_cast<VkObjectTableCreateInfoNVX*>(this);
-    }
-
-    bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectCount == rhs.objectCount )
-          && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
-          && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
-          && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
-          && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
-          && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
-          && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
-          && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
-          && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
-    }
-
-    bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eObjectTableCreateInfoNVX;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t objectCount;
-    const ObjectEntryTypeNVX* pObjectEntryTypes;
-    const uint32_t* pObjectEntryCounts;
-    const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
-    uint32_t maxUniformBuffersPerDescriptor;
-    uint32_t maxStorageBuffersPerDescriptor;
-    uint32_t maxStorageImagesPerDescriptor;
-    uint32_t maxSampledImagesPerDescriptor;
-    uint32_t maxPipelineLayouts;
-  };
-  static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
-
-  struct ObjectTableEntryNVX
-  {
-    ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet,
-                         ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
-      : type( type_ )
-      , flags( flags_ )
-    {
-    }
-
-    ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) );
-    }
-
-    ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) );
-      return *this;
-    }
-    ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    operator VkObjectTableEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
-    }
-
-    operator VkObjectTableEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTableEntryNVX*>(this);
-    }
-
-    bool operator==( ObjectTableEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( ObjectTableEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ObjectEntryTypeNVX type;
-    ObjectEntryUsageFlagsNVX flags;
-  };
-  static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
-
-  struct ObjectTablePipelineEntryNVX
-  {
-    ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet,
-                                 ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(),
-                                 Pipeline pipeline_ = Pipeline() )
-      : type( type_ )
-      , flags( flags_ )
-      , pipeline( pipeline_ )
-    {
-    }
-
-    explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                          Pipeline pipeline_ = Pipeline() )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , pipeline( pipeline_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
+    : properties( properties_ )
     {}
 
-    ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) );
-    }
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
+    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) );
-      return *this;
+      *this = rhs;
     }
-    ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      type = type_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
       return *this;
     }
 
-    ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SparseImageFormatProperties2 ) );
       return *this;
     }
 
-    ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
+
+    operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
     {
-      pipeline = pipeline_;
-      return *this;
+      return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
     }
 
-    operator VkObjectTablePipelineEntryNVX const&() const
+    operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
+      return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
     }
 
-    operator VkObjectTablePipelineEntryNVX &()
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SparseImageFormatProperties2 const& ) const = default;
+#else
+    bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkObjectTablePipelineEntryNVX*>(this);
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
     }
 
-    bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( pipeline == rhs.pipeline );
-    }
-
-    bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
+    bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    ObjectEntryTypeNVX type;
-    ObjectEntryUsageFlagsNVX flags;
-    Pipeline pipeline;
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
+
   };
-  static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
+  static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
 
-  struct ObjectTableDescriptorSetEntryNVX
+  template <>
+  struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
   {
-    ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet,
-                                      ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(),
-                                      PipelineLayout pipelineLayout_ = PipelineLayout(),
-                                      DescriptorSet descriptorSet_ = DescriptorSet() )
-      : type( type_ )
-      , flags( flags_ )
-      , pipelineLayout( pipelineLayout_ )
-      , descriptorSet( descriptorSet_ )
-    {
-    }
+    using Type = SparseImageFormatProperties2;
+  };
+  using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
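A minimal usage sketch of the wrapper/C-type round trip that the converting constructor and conversion operators above provide. The function and variable names are illustrative only, and it assumes the default vk namespace with struct constructors enabled (i.e. VULKAN_HPP_NO_STRUCT_CONSTRUCTORS not defined):

    // Illustrative sketch only.
    #include <vulkan/vulkan.hpp>

    void inspect( VkSparseImageFormatProperties2 const & cProps )
    {
      // The wrapper is constructible from the C struct ...
      vk::SparseImageFormatProperties2 props( cProps );

      // ... and converts back by reference, since both types share one layout
      // (enforced by the static_asserts above).
      VkSparseImageFormatProperties2 const & raw = props;
      (void) raw;

      // sType is fixed to eSparseImageFormatProperties2; only pNext and
      // properties carry data.
      vk::SparseImageFormatProperties inner = props.properties;
      (void) inner;
    }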
 
-    explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                               PipelineLayout pipelineLayout_ = PipelineLayout(),
-                                               DescriptorSet descriptorSet_ = DescriptorSet() )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , pipelineLayout( pipelineLayout_ )
-      , descriptorSet( descriptorSet_ )
+  struct FramebufferMixedSamplesCombinationNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}) VULKAN_HPP_NOEXCEPT
+    : coverageReductionMode( coverageReductionMode_ ), rasterizationSamples( rasterizationSamples_ ), depthStencilSamples( depthStencilSamples_ ), colorSamples( colorSamples_ )
     {}
 
-    ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) );
-    }
+    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
+    FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) );
-      return *this;
+      *this = rhs;
     }
-    ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      type = type_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
       return *this;
     }
 
-    ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferMixedSamplesCombinationNV ) );
       return *this;
     }
 
-    ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
+
+    operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      pipelineLayout = pipelineLayout_;
-      return *this;
+      return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
     }
 
-    ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
+    operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
     {
-      descriptorSet = descriptorSet_;
-      return *this;
+      return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
     }
 
-    operator VkObjectTableDescriptorSetEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
-    }
 
-    operator VkObjectTableDescriptorSetEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTableDescriptorSetEntryNVX*>(this);
-    }
-
-    bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( pipelineLayout == rhs.pipelineLayout )
-          && ( descriptorSet == rhs.descriptorSet );
-    }
-
-    bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ObjectEntryTypeNVX type;
-    ObjectEntryUsageFlagsNVX flags;
-    PipelineLayout pipelineLayout;
-    DescriptorSet descriptorSet;
-  };
-  static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
-
-  struct ObjectTableVertexBufferEntryNVX
-  {
-    ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet,
-                                     ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(),
-                                     Buffer buffer_ = Buffer() )
-      : type( type_ )
-      , flags( flags_ )
-      , buffer( buffer_ )
-    {
-    }
-
-    explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                              Buffer buffer_ = Buffer() )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , buffer( buffer_ )
-    {}
-
-    ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) );
-    }
-
-    ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) );
-      return *this;
-    }
-    ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    operator VkObjectTableVertexBufferEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
-    }
-
-    operator VkObjectTableVertexBufferEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>(this);
-    }
-
-    bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( buffer == rhs.buffer );
-    }
-
-    bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ObjectEntryTypeNVX type;
-    ObjectEntryUsageFlagsNVX flags;
-    Buffer buffer;
-  };
-  static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
-
-  struct ObjectTableIndexBufferEntryNVX
-  {
-    ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet,
-                                    ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(),
-                                    Buffer buffer_ = Buffer(),
-                                    IndexType indexType_ = IndexType::eUint16 )
-      : type( type_ )
-      , flags( flags_ )
-      , buffer( buffer_ )
-      , indexType( indexType_ )
-    {
-    }
-
-    explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                             Buffer buffer_ = Buffer(),
-                                             IndexType indexType_ = IndexType::eUint16 )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , buffer( buffer_ )
-      , indexType( indexType_ )
-    {}
-
-    ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) );
-    }
-
-    ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) );
-      return *this;
-    }
-    ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    ObjectTableIndexBufferEntryNVX& setIndexType( IndexType indexType_ )
-    {
-      indexType = indexType_;
-      return *this;
-    }
-
-    operator VkObjectTableIndexBufferEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
-    }
-
-    operator VkObjectTableIndexBufferEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTableIndexBufferEntryNVX*>(this);
-    }
-
-    bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( buffer == rhs.buffer )
-          && ( indexType == rhs.indexType );
-    }
-
-    bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ObjectEntryTypeNVX type;
-    ObjectEntryUsageFlagsNVX flags;
-    Buffer buffer;
-    IndexType indexType;
-  };
-  static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
-
-  struct ObjectTablePushConstantEntryNVX
-  {
-    ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet,
-                                     ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(),
-                                     PipelineLayout pipelineLayout_ = PipelineLayout(),
-                                     ShaderStageFlags stageFlags_ = ShaderStageFlags() )
-      : type( type_ )
-      , flags( flags_ )
-      , pipelineLayout( pipelineLayout_ )
-      , stageFlags( stageFlags_ )
-    {
-    }
-
-    explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                              PipelineLayout pipelineLayout_ = PipelineLayout(),
-                                              ShaderStageFlags stageFlags_ = ShaderStageFlags() )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , pipelineLayout( pipelineLayout_ )
-      , stageFlags( stageFlags_ )
-    {}
-
-    ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) );
-    }
-
-    ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) );
-      return *this;
-    }
-    ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
-    {
-      pipelineLayout = pipelineLayout_;
-      return *this;
-    }
-
-    ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
-    {
-      stageFlags = stageFlags_;
-      return *this;
-    }
-
-    operator VkObjectTablePushConstantEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
-    }
-
-    operator VkObjectTablePushConstantEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>(this);
-    }
-
-    bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( pipelineLayout == rhs.pipelineLayout )
-          && ( stageFlags == rhs.stageFlags );
-    }
-
-    bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ObjectEntryTypeNVX type;
-    ObjectEntryUsageFlagsNVX flags;
-    PipelineLayout pipelineLayout;
-    ShaderStageFlags stageFlags;
-  };
-  static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
-
-  enum class DescriptorSetLayoutCreateFlagBits
-  {
-    ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
-    eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
-  };
-
-  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
-
-  VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
-  {
-    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits )
-  {
-    return ~( DescriptorSetLayoutCreateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT)
-    };
-  };
-
-  struct DescriptorSetLayoutCreateInfo
-  {
-    DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(),
-                                   uint32_t bindingCount_ = 0,
-                                   const DescriptorSetLayoutBinding* pBindings_ = nullptr )
-      : flags( flags_ )
-      , bindingCount( bindingCount_ )
-      , pBindings( pBindings_ )
-    {
-    }
-
-    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
-    }
-
-    DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
-      return *this;
-    }
-    DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ )
-    {
-      bindingCount = bindingCount_;
-      return *this;
-    }
-
-    DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ )
-    {
-      pBindings = pBindings_;
-      return *this;
-    }
-
-    operator VkDescriptorSetLayoutCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>(this);
-    }
-
-    operator VkDescriptorSetLayoutCreateInfo &()
-    {
-      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>(this);
-    }
-
-    bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferMixedSamplesCombinationNV const& ) const = default;
+#else
+    bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( bindingCount == rhs.bindingCount )
-          && ( pBindings == rhs.pBindings );
+          && ( coverageReductionMode == rhs.coverageReductionMode )
+          && ( rasterizationSamples == rhs.rasterizationSamples )
+          && ( depthStencilSamples == rhs.depthStencilSamples )
+          && ( colorSamples == rhs.colorSamples );
     }
 
-    bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const
+    bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    DescriptorSetLayoutCreateFlags flags;
-    uint32_t bindingCount;
-    const DescriptorSetLayoutBinding* pBindings;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
+
   };
-  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
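A short sketch of how the defaulted constexpr constructor and the comparison operators above can be used. The function name is illustrative only; it assumes the default vk namespace and that VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is not defined:

    // Illustrative sketch only.
    #include <vulkan/vulkan.hpp>

    bool isDefaultCombination( vk::FramebufferMixedSamplesCombinationNV const & combo )
    {
      // Defaults from the constructor above: eMerge coverage reduction,
      // one rasterization sample, empty depth/stencil and color sample flags.
      vk::FramebufferMixedSamplesCombinationNV reference{};

      // operator== (or operator<=> under C++20) compares sType, pNext and every member.
      return combo == reference;
    }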
 
-  enum class ExternalMemoryHandleTypeFlagBits
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
   {
-    eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
-    eOpaqueFdKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
-    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
-    eOpaqueWin32KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
-    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
-    eOpaqueWin32KmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
-    eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
-    eD3D11TextureKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
-    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
-    eD3D11TextureKmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
-    eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
-    eD3D12HeapKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
-    eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
-    eD3D12ResourceKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
-    eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
-    eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
-    eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
-    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT
-  };
-
-  using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits, VkExternalMemoryHandleTypeFlags>;
-
-  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 )
-  {
-    return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits )
-  {
-    return ~( ExternalMemoryHandleTypeFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
-    };
-  };
-
-  using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
-
-  struct PhysicalDeviceExternalImageFormatInfo
-  {
-    PhysicalDeviceExternalImageFormatInfo( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
-      : handleType( handleType_ )
-    {
-    }
-
-    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) );
-    }
-
-    PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) );
-      return *this;
-    }
-    PhysicalDeviceExternalImageFormatInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceExternalImageFormatInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceExternalImageFormatInfo const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>(this);
-    }
-
-    operator VkPhysicalDeviceExternalImageFormatInfo &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>(this);
-    }
-
-    bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
-
-  struct PhysicalDeviceExternalBufferInfo
-  {
-    PhysicalDeviceExternalBufferInfo( BufferCreateFlags flags_ = BufferCreateFlags(),
-                                      BufferUsageFlags usage_ = BufferUsageFlags(),
-                                      ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
-      : flags( flags_ )
-      , usage( usage_ )
-      , handleType( handleType_ )
-    {
-    }
-
-    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) );
-    }
-
-    PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) );
-      return *this;
-    }
-    PhysicalDeviceExternalBufferInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceExternalBufferInfo& setFlags( BufferCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    PhysicalDeviceExternalBufferInfo& setUsage( BufferUsageFlags usage_ )
-    {
-      usage = usage_;
-      return *this;
-    }
-
-    PhysicalDeviceExternalBufferInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceExternalBufferInfo const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>(this);
-    }
-
-    operator VkPhysicalDeviceExternalBufferInfo &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>(this);
-    }
-
-    bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( usage == rhs.usage )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
-
-  public:
-    const void* pNext = nullptr;
-    BufferCreateFlags flags;
-    BufferUsageFlags usage;
-    ExternalMemoryHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
-
-  struct ExternalMemoryImageCreateInfo
-  {
-    ExternalMemoryImageCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() )
-      : handleTypes( handleTypes_ )
-    {
-    }
-
-    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) );
-    }
-
-    ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) );
-      return *this;
-    }
-    ExternalMemoryImageCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExternalMemoryImageCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExternalMemoryImageCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>(this);
-    }
-
-    operator VkExternalMemoryImageCreateInfo &()
-    {
-      return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>(this);
-    }
-
-    bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlags handleTypes;
-  };
-  static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
-
-  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
-
-  struct ExternalMemoryBufferCreateInfo
-  {
-    ExternalMemoryBufferCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() )
-      : handleTypes( handleTypes_ )
-    {
-    }
-
-    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) );
-    }
-
-    ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) );
-      return *this;
-    }
-    ExternalMemoryBufferCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExternalMemoryBufferCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExternalMemoryBufferCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>(this);
-    }
-
-    operator VkExternalMemoryBufferCreateInfo &()
-    {
-      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>(this);
-    }
-
-    bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlags handleTypes;
-  };
-  static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
-
-  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
-
-  struct ExportMemoryAllocateInfo
-  {
-    ExportMemoryAllocateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() )
-      : handleTypes( handleTypes_ )
-    {
-    }
-
-    ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) );
-    }
-
-    ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) );
-      return *this;
-    }
-    ExportMemoryAllocateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportMemoryAllocateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExportMemoryAllocateInfo const&() const
-    {
-      return *reinterpret_cast<const VkExportMemoryAllocateInfo*>(this);
-    }
-
-    operator VkExportMemoryAllocateInfo &()
-    {
-      return *reinterpret_cast<VkExportMemoryAllocateInfo*>(this);
-    }
-
-    bool operator==( ExportMemoryAllocateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExportMemoryAllocateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportMemoryAllocateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlags handleTypes;
-  };
-  static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
-
-  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct ImportMemoryWin32HandleInfoKHR
-  {
-    ImportMemoryWin32HandleInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
-                                    HANDLE handle_ = 0,
-                                    LPCWSTR name_ = 0 )
-      : handleType( handleType_ )
-      , handle( handle_ )
-      , name( name_ )
-    {
-    }
-
-    ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
-    }
-
-    ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
-      return *this;
-    }
-    ImportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoKHR& setHandle( HANDLE handle_ )
-    {
-      handle = handle_;
-      return *this;
-    }
-
-    ImportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ )
-    {
-      name = name_;
-      return *this;
-    }
-
-    operator VkImportMemoryWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkImportMemoryWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType )
-          && ( handle == rhs.handle )
-          && ( name == rhs.name );
-    }
-
-    bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlagBits handleType;
-    HANDLE handle;
-    LPCWSTR name;
-  };
-  static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct MemoryGetWin32HandleInfoKHR
-  {
-    MemoryGetWin32HandleInfoKHR( DeviceMemory memory_ = DeviceMemory(),
-                                 ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
-      : memory( memory_ )
-      , handleType( handleType_ )
-    {
-    }
-
-    MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
-    }
-
-    MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
-      return *this;
-    }
-    MemoryGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryGetWin32HandleInfoKHR& setMemory( DeviceMemory memory_ )
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    MemoryGetWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkMemoryGetWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkMemoryGetWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceMemory memory;
-    ExternalMemoryHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  struct ImportMemoryFdInfoKHR
-  {
-    ImportMemoryFdInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
-                           int fd_ = 0 )
-      : handleType( handleType_ )
-      , fd( fd_ )
-    {
-    }
-
-    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) );
-    }
-
-    ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) );
-      return *this;
-    }
-    ImportMemoryFdInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportMemoryFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportMemoryFdInfoKHR& setFd( int fd_ )
-    {
-      fd = fd_;
-      return *this;
-    }
-
-    operator VkImportMemoryFdInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>(this);
-    }
-
-    operator VkImportMemoryFdInfoKHR &()
-    {
-      return *reinterpret_cast<VkImportMemoryFdInfoKHR*>(this);
-    }
-
-    bool operator==( ImportMemoryFdInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType )
-          && ( fd == rhs.fd );
-    }
-
-    bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportMemoryFdInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlagBits handleType;
-    int fd;
-  };
-  static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
-
-  struct MemoryGetFdInfoKHR
-  {
-    MemoryGetFdInfoKHR( DeviceMemory memory_ = DeviceMemory(),
-                        ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
-      : memory( memory_ )
-      , handleType( handleType_ )
-    {
-    }
-
-    MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) );
-    }
-
-    MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) );
-      return *this;
-    }
-    MemoryGetFdInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryGetFdInfoKHR& setMemory( DeviceMemory memory_ )
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    MemoryGetFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkMemoryGetFdInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>(this);
-    }
-
-    operator VkMemoryGetFdInfoKHR &()
-    {
-      return *reinterpret_cast<VkMemoryGetFdInfoKHR*>(this);
-    }
-
-    bool operator==( MemoryGetFdInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( MemoryGetFdInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryGetFdInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceMemory memory;
-    ExternalMemoryHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
-
-  struct ImportMemoryHostPointerInfoEXT
-  {
-    ImportMemoryHostPointerInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
-                                    void* pHostPointer_ = nullptr )
-      : handleType( handleType_ )
-      , pHostPointer( pHostPointer_ )
-    {
-    }
-
-    ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
-    }
-
-    ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
-      return *this;
-    }
-    ImportMemoryHostPointerInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportMemoryHostPointerInfoEXT& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportMemoryHostPointerInfoEXT& setPHostPointer( void* pHostPointer_ )
-    {
-      pHostPointer = pHostPointer_;
-      return *this;
-    }
-
-    operator VkImportMemoryHostPointerInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>(this);
-    }
-
-    operator VkImportMemoryHostPointerInfoEXT &()
-    {
-      return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>(this);
-    }
-
-    bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType )
-          && ( pHostPointer == rhs.pHostPointer );
-    }
-
-    bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalMemoryHandleTypeFlagBits handleType;
-    void* pHostPointer;
-  };
-  static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class ExternalMemoryFeatureFlagBits
-  {
-    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
-    eDedicatedOnlyKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
-    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
-    eExportableKHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
-    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT,
-    eImportableKHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
-  };
-
-  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits, VkExternalMemoryFeatureFlags>;
-
-  VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 )
-  {
-    return ExternalMemoryFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits )
-  {
-    return ~( ExternalMemoryFeatureFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
-    };
-  };
-
-  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
-
-  struct ExternalMemoryProperties
-  {
-    operator VkExternalMemoryProperties const&() const
-    {
-      return *reinterpret_cast<const VkExternalMemoryProperties*>(this);
-    }
-
-    operator VkExternalMemoryProperties &()
-    {
-      return *reinterpret_cast<VkExternalMemoryProperties*>(this);
-    }
-
-    bool operator==( ExternalMemoryProperties const& rhs ) const
-    {
-      return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
-          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
-          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
-    }
-
-    bool operator!=( ExternalMemoryProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    ExternalMemoryFeatureFlags externalMemoryFeatures;
-    ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes;
-    ExternalMemoryHandleTypeFlags compatibleHandleTypes;
-  };
-  static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
-
-  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
-
-  struct ExternalImageFormatProperties
-  {
-    operator VkExternalImageFormatProperties const&() const
-    {
-      return *reinterpret_cast<const VkExternalImageFormatProperties*>(this);
-    }
-
-    operator VkExternalImageFormatProperties &()
-    {
-      return *reinterpret_cast<VkExternalImageFormatProperties*>(this);
-    }
-
-    bool operator==( ExternalImageFormatProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( externalMemoryProperties == rhs.externalMemoryProperties );
-    }
-
-    bool operator!=( ExternalImageFormatProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalImageFormatProperties;
-
-  public:
-    void* pNext = nullptr;
-    ExternalMemoryProperties externalMemoryProperties;
-  };
-  static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
-
-  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
-
-  struct ExternalBufferProperties
-  {
-    operator VkExternalBufferProperties const&() const
-    {
-      return *reinterpret_cast<const VkExternalBufferProperties*>(this);
-    }
-
-    operator VkExternalBufferProperties &()
-    {
-      return *reinterpret_cast<VkExternalBufferProperties*>(this);
-    }
-
-    bool operator==( ExternalBufferProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( externalMemoryProperties == rhs.externalMemoryProperties );
-    }
-
-    bool operator!=( ExternalBufferProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalBufferProperties;
-
-  public:
-    void* pNext = nullptr;
-    ExternalMemoryProperties externalMemoryProperties;
-  };
-  static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
-
-  using ExternalBufferPropertiesKHR = ExternalBufferProperties;
-
-  enum class ExternalSemaphoreHandleTypeFlagBits
-  {
-    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
-    eOpaqueFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
-    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
-    eOpaqueWin32KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
-    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
-    eOpaqueWin32KmtKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
-    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
-    eD3D12FenceKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
-    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
-    eSyncFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT
-  };
-
-  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits, VkExternalSemaphoreHandleTypeFlags>;
-
-  VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 )
-  {
-    return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits )
-  {
-    return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
-    };
-  };
-
-  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
-
-  struct PhysicalDeviceExternalSemaphoreInfo
-  {
-    PhysicalDeviceExternalSemaphoreInfo( ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
-      : handleType( handleType_ )
-    {
-    }
-
-    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) );
-    }
-
-    PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) );
-      return *this;
-    }
-    PhysicalDeviceExternalSemaphoreInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceExternalSemaphoreInfo& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceExternalSemaphoreInfo const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>(this);
-    }
-
-    operator VkPhysicalDeviceExternalSemaphoreInfo &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>(this);
-    }
-
-    bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalSemaphoreHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
-
-  struct ExportSemaphoreCreateInfo
-  {
-    ExportSemaphoreCreateInfo( ExternalSemaphoreHandleTypeFlags handleTypes_ = ExternalSemaphoreHandleTypeFlags() )
-      : handleTypes( handleTypes_ )
-    {
-    }
-
-    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) );
-    }
-
-    ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) );
-      return *this;
-    }
-    ExportSemaphoreCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportSemaphoreCreateInfo& setHandleTypes( ExternalSemaphoreHandleTypeFlags handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExportSemaphoreCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>(this);
-    }
-
-    operator VkExportSemaphoreCreateInfo &()
-    {
-      return *reinterpret_cast<VkExportSemaphoreCreateInfo*>(this);
-    }
-
-    bool operator==( ExportSemaphoreCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportSemaphoreCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalSemaphoreHandleTypeFlags handleTypes;
-  };
-  static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
-
-  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct SemaphoreGetWin32HandleInfoKHR
-  {
-    SemaphoreGetWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(),
-                                    ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
-      : semaphore( semaphore_ )
-      , handleType( handleType_ )
-    {
-    }
-
-    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
-    }
-
-    SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
-      return *this;
-    }
-    SemaphoreGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SemaphoreGetWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ )
-    {
-      semaphore = semaphore_;
-      return *this;
-    }
-
-    SemaphoreGetWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkSemaphoreGetWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkSemaphoreGetWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( semaphore == rhs.semaphore )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Semaphore semaphore;
-    ExternalSemaphoreHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  struct SemaphoreGetFdInfoKHR
-  {
-    SemaphoreGetFdInfoKHR( Semaphore semaphore_ = Semaphore(),
-                           ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
-      : semaphore( semaphore_ )
-      , handleType( handleType_ )
-    {
-    }
-
-    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
-    }
-
-    SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
-      return *this;
-    }
-    SemaphoreGetFdInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SemaphoreGetFdInfoKHR& setSemaphore( Semaphore semaphore_ )
-    {
-      semaphore = semaphore_;
-      return *this;
-    }
-
-    SemaphoreGetFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkSemaphoreGetFdInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>(this);
-    }
-
-    operator VkSemaphoreGetFdInfoKHR &()
-    {
-      return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>(this);
-    }
-
-    bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( semaphore == rhs.semaphore )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Semaphore semaphore;
-    ExternalSemaphoreHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
-
-  enum class ExternalSemaphoreFeatureFlagBits
-  {
-    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
-    eExportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
-    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT,
-    eImportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
-  };
-
-  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits, VkExternalSemaphoreFeatureFlags>;
-
-  VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 )
-  {
-    return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits )
-  {
-    return ~( ExternalSemaphoreFeatureFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
-    };
-  };
-
-  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
-
-  struct ExternalSemaphoreProperties
-  {
-    operator VkExternalSemaphoreProperties const&() const
-    {
-      return *reinterpret_cast<const VkExternalSemaphoreProperties*>(this);
-    }
-
-    operator VkExternalSemaphoreProperties &()
-    {
-      return *reinterpret_cast<VkExternalSemaphoreProperties*>(this);
-    }
-
-    bool operator==( ExternalSemaphoreProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
-          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
-          && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
-    }
-
-    bool operator!=( ExternalSemaphoreProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalSemaphoreProperties;
-
-  public:
-    void* pNext = nullptr;
-    ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes;
-    ExternalSemaphoreHandleTypeFlags compatibleHandleTypes;
-    ExternalSemaphoreFeatureFlags externalSemaphoreFeatures;
-  };
-  static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
-
-  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
-
-  enum class SemaphoreImportFlagBits
-  {
-    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
-    eTemporaryKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
-  };
-
-  using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits, VkSemaphoreImportFlags>;
-
-  VULKAN_HPP_INLINE SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 )
-  {
-    return SemaphoreImportFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits )
-  {
-    return ~( SemaphoreImportFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<SemaphoreImportFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary)
-    };
-  };
-
-  using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct ImportSemaphoreWin32HandleInfoKHR
-  {
-    ImportSemaphoreWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(),
-                                       SemaphoreImportFlags flags_ = SemaphoreImportFlags(),
-                                       ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
-                                       HANDLE handle_ = 0,
-                                       LPCWSTR name_ = 0 )
-      : semaphore( semaphore_ )
-      , flags( flags_ )
-      , handleType( handleType_ )
-      , handle( handle_ )
-      , name( name_ )
-    {
-    }
-
-    ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
-    }
-
-    ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
-      return *this;
-    }
-    ImportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportSemaphoreWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ )
-    {
-      semaphore = semaphore_;
-      return *this;
-    }
-
-    ImportSemaphoreWin32HandleInfoKHR& setFlags( SemaphoreImportFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ImportSemaphoreWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportSemaphoreWin32HandleInfoKHR& setHandle( HANDLE handle_ )
-    {
-      handle = handle_;
-      return *this;
-    }
-
-    ImportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ )
-    {
-      name = name_;
-      return *this;
-    }
-
-    operator VkImportSemaphoreWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkImportSemaphoreWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( semaphore == rhs.semaphore )
-          && ( flags == rhs.flags )
-          && ( handleType == rhs.handleType )
-          && ( handle == rhs.handle )
-          && ( name == rhs.name );
-    }
-
-    bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Semaphore semaphore;
-    SemaphoreImportFlags flags;
-    ExternalSemaphoreHandleTypeFlagBits handleType;
-    HANDLE handle;
-    LPCWSTR name;
-  };
-  static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  struct ImportSemaphoreFdInfoKHR
-  {
-    ImportSemaphoreFdInfoKHR( Semaphore semaphore_ = Semaphore(),
-                              SemaphoreImportFlags flags_ = SemaphoreImportFlags(),
-                              ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
-                              int fd_ = 0 )
-      : semaphore( semaphore_ )
-      , flags( flags_ )
-      , handleType( handleType_ )
-      , fd( fd_ )
-    {
-    }
-
-    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
-    }
-
-    ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
-      return *this;
-    }
-    ImportSemaphoreFdInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportSemaphoreFdInfoKHR& setSemaphore( Semaphore semaphore_ )
-    {
-      semaphore = semaphore_;
-      return *this;
-    }
-
-    ImportSemaphoreFdInfoKHR& setFlags( SemaphoreImportFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ImportSemaphoreFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportSemaphoreFdInfoKHR& setFd( int fd_ )
-    {
-      fd = fd_;
-      return *this;
-    }
-
-    operator VkImportSemaphoreFdInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>(this);
-    }
-
-    operator VkImportSemaphoreFdInfoKHR &()
-    {
-      return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>(this);
-    }
-
-    bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( semaphore == rhs.semaphore )
-          && ( flags == rhs.flags )
-          && ( handleType == rhs.handleType )
-          && ( fd == rhs.fd );
-    }
-
-    bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Semaphore semaphore;
-    SemaphoreImportFlags flags;
-    ExternalSemaphoreHandleTypeFlagBits handleType;
-    int fd;
-  };
-  static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
-
-  enum class ExternalFenceHandleTypeFlagBits
-  {
-    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
-    eOpaqueFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
-    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
-    eOpaqueWin32KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
-    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
-    eOpaqueWin32KmtKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
-    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
-    eSyncFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
-  };
-
-  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits, VkExternalFenceHandleTypeFlags>;
-
-  VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 )
-  {
-    return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits )
-  {
-    return ~( ExternalFenceHandleTypeFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
-    };
-  };
-
-  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
-
-  struct PhysicalDeviceExternalFenceInfo
-  {
-    PhysicalDeviceExternalFenceInfo( ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd )
-      : handleType( handleType_ )
-    {
-    }
-
-    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) );
-    }
-
-    PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) );
-      return *this;
-    }
-    PhysicalDeviceExternalFenceInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceExternalFenceInfo& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceExternalFenceInfo const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>(this);
-    }
-
-    operator VkPhysicalDeviceExternalFenceInfo &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>(this);
-    }
-
-    bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalFenceHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
-
-  struct ExportFenceCreateInfo
-  {
-    ExportFenceCreateInfo( ExternalFenceHandleTypeFlags handleTypes_ = ExternalFenceHandleTypeFlags() )
-      : handleTypes( handleTypes_ )
-    {
-    }
-
-    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) );
-    }
-
-    ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) );
-      return *this;
-    }
-    ExportFenceCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExportFenceCreateInfo& setHandleTypes( ExternalFenceHandleTypeFlags handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExportFenceCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkExportFenceCreateInfo*>(this);
-    }
-
-    operator VkExportFenceCreateInfo &()
-    {
-      return *reinterpret_cast<VkExportFenceCreateInfo*>(this);
-    }
-
-    bool operator==( ExportFenceCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExportFenceCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExportFenceCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    ExternalFenceHandleTypeFlags handleTypes;
-  };
-  static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
-
-  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct FenceGetWin32HandleInfoKHR
-  {
-    FenceGetWin32HandleInfoKHR( Fence fence_ = Fence(),
-                                ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd )
-      : fence( fence_ )
-      , handleType( handleType_ )
-    {
-    }
-
-    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
-    }
-
-    FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
-      return *this;
-    }
-    FenceGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    FenceGetWin32HandleInfoKHR& setFence( Fence fence_ )
-    {
-      fence = fence_;
-      return *this;
-    }
-
-    FenceGetWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkFenceGetWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkFenceGetWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fence == rhs.fence )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Fence fence;
-    ExternalFenceHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  struct FenceGetFdInfoKHR
-  {
-    FenceGetFdInfoKHR( Fence fence_ = Fence(),
-                       ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd )
-      : fence( fence_ )
-      , handleType( handleType_ )
-    {
-    }
-
-    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) );
-    }
-
-    FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) );
-      return *this;
-    }
-    FenceGetFdInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    FenceGetFdInfoKHR& setFence( Fence fence_ )
-    {
-      fence = fence_;
-      return *this;
-    }
-
-    FenceGetFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    operator VkFenceGetFdInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkFenceGetFdInfoKHR*>(this);
-    }
-
-    operator VkFenceGetFdInfoKHR &()
-    {
-      return *reinterpret_cast<VkFenceGetFdInfoKHR*>(this);
-    }
-
-    bool operator==( FenceGetFdInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fence == rhs.fence )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( FenceGetFdInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eFenceGetFdInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Fence fence;
-    ExternalFenceHandleTypeFlagBits handleType;
-  };
-  static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
-
-  enum class ExternalFenceFeatureFlagBits
-  {
-    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
-    eExportableKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
-    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT,
-    eImportableKHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
-  };
-
-  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits, VkExternalFenceFeatureFlags>;
-
-  VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 )
-  {
-    return ExternalFenceFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits )
-  {
-    return ~( ExternalFenceFeatureFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable)
-    };
-  };
-
-  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
-
-  struct ExternalFenceProperties
-  {
-    operator VkExternalFenceProperties const&() const
-    {
-      return *reinterpret_cast<const VkExternalFenceProperties*>(this);
-    }
-
-    operator VkExternalFenceProperties &()
-    {
-      return *reinterpret_cast<VkExternalFenceProperties*>(this);
-    }
-
-    bool operator==( ExternalFenceProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
-          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
-          && ( externalFenceFeatures == rhs.externalFenceFeatures );
-    }
-
-    bool operator!=( ExternalFenceProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eExternalFenceProperties;
-
-  public:
-    void* pNext = nullptr;
-    ExternalFenceHandleTypeFlags exportFromImportedHandleTypes;
-    ExternalFenceHandleTypeFlags compatibleHandleTypes;
-    ExternalFenceFeatureFlags externalFenceFeatures;
-  };
-  static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
-
-  using ExternalFencePropertiesKHR = ExternalFenceProperties;
-
-  enum class FenceImportFlagBits
-  {
-    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT,
-    eTemporaryKHR = VK_FENCE_IMPORT_TEMPORARY_BIT
-  };
-
-  using FenceImportFlags = Flags<FenceImportFlagBits, VkFenceImportFlags>;
-
-  VULKAN_HPP_INLINE FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 )
-  {
-    return FenceImportFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE FenceImportFlags operator~( FenceImportFlagBits bits )
-  {
-    return ~( FenceImportFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<FenceImportFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(FenceImportFlagBits::eTemporary)
-    };
-  };
-
-  using FenceImportFlagsKHR = FenceImportFlags;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  struct ImportFenceWin32HandleInfoKHR
-  {
-    ImportFenceWin32HandleInfoKHR( Fence fence_ = Fence(),
-                                   FenceImportFlags flags_ = FenceImportFlags(),
-                                   ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd,
-                                   HANDLE handle_ = 0,
-                                   LPCWSTR name_ = 0 )
-      : fence( fence_ )
-      , flags( flags_ )
-      , handleType( handleType_ )
-      , handle( handle_ )
-      , name( name_ )
-    {
-    }
-
-    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
-    }
-
-    ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
-      return *this;
-    }
-    ImportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportFenceWin32HandleInfoKHR& setFence( Fence fence_ )
-    {
-      fence = fence_;
-      return *this;
-    }
-
-    ImportFenceWin32HandleInfoKHR& setFlags( FenceImportFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ImportFenceWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportFenceWin32HandleInfoKHR& setHandle( HANDLE handle_ )
-    {
-      handle = handle_;
-      return *this;
-    }
-
-    ImportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ )
-    {
-      name = name_;
-      return *this;
-    }
-
-    operator VkImportFenceWin32HandleInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>(this);
-    }
-
-    operator VkImportFenceWin32HandleInfoKHR &()
-    {
-      return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>(this);
-    }
-
-    bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fence == rhs.fence )
-          && ( flags == rhs.flags )
-          && ( handleType == rhs.handleType )
-          && ( handle == rhs.handle )
-          && ( name == rhs.name );
-    }
-
-    bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Fence fence;
-    FenceImportFlags flags;
-    ExternalFenceHandleTypeFlagBits handleType;
-    HANDLE handle;
-    LPCWSTR name;
-  };
-  static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  struct ImportFenceFdInfoKHR
-  {
-    ImportFenceFdInfoKHR( Fence fence_ = Fence(),
-                          FenceImportFlags flags_ = FenceImportFlags(),
-                          ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd,
-                          int fd_ = 0 )
-      : fence( fence_ )
-      , flags( flags_ )
-      , handleType( handleType_ )
-      , fd( fd_ )
-    {
-    }
-
-    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) );
-    }
-
-    ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) );
-      return *this;
-    }
-    ImportFenceFdInfoKHR& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImportFenceFdInfoKHR& setFence( Fence fence_ )
-    {
-      fence = fence_;
-      return *this;
-    }
-
-    ImportFenceFdInfoKHR& setFlags( FenceImportFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ImportFenceFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ )
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-    ImportFenceFdInfoKHR& setFd( int fd_ )
-    {
-      fd = fd_;
-      return *this;
-    }
-
-    operator VkImportFenceFdInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkImportFenceFdInfoKHR*>(this);
-    }
-
-    operator VkImportFenceFdInfoKHR &()
-    {
-      return *reinterpret_cast<VkImportFenceFdInfoKHR*>(this);
-    }
-
-    bool operator==( ImportFenceFdInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fence == rhs.fence )
-          && ( flags == rhs.flags )
-          && ( handleType == rhs.handleType )
-          && ( fd == rhs.fd );
-    }
-
-    bool operator!=( ImportFenceFdInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eImportFenceFdInfoKHR;
-
-  public:
-    const void* pNext = nullptr;
-    Fence fence;
-    FenceImportFlags flags;
-    ExternalFenceHandleTypeFlagBits handleType;
-    int fd;
-  };
-  static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
-
-  enum class SurfaceCounterFlagBitsEXT
-  {
-    eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
-  };
-
-  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
-
-  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 )
-  {
-    return SurfaceCounterFlagsEXT( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
-  {
-    return ~( SurfaceCounterFlagsEXT( bits ) );
-  }
-
-  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
-  {
-    enum
-    {
-      allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
-    };
+    using Type = FramebufferMixedSamplesCombinationNV;
   };
 
   struct SurfaceCapabilities2EXT
   {
-    operator VkSurfaceCapabilities2EXT const&() const
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
+    : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ ), supportedSurfaceCounters( supportedSurfaceCounters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
+      return *this;
     }
 
-    operator VkSurfaceCapabilities2EXT &()
+    SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSurfaceCapabilities2EXT*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilities2EXT ) );
+      return *this;
     }
 
-    bool operator==( SurfaceCapabilities2EXT const& rhs ) const
+
+    operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
+    }
+
+    operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilities2EXT const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -33638,540 +53720,1332 @@
           && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
     }
 
-    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const
+    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+
 
   public:
-    void* pNext = nullptr;
-    uint32_t minImageCount;
-    uint32_t maxImageCount;
-    Extent2D currentExtent;
-    Extent2D minImageExtent;
-    Extent2D maxImageExtent;
-    uint32_t maxImageArrayLayers;
-    SurfaceTransformFlagsKHR supportedTransforms;
-    SurfaceTransformFlagBitsKHR currentTransform;
-    CompositeAlphaFlagsKHR supportedCompositeAlpha;
-    ImageUsageFlags supportedUsageFlags;
-    SurfaceCounterFlagsEXT supportedSurfaceCounters;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+    void* pNext = {};
+    uint32_t minImageCount = {};
+    uint32_t maxImageCount = {};
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+    uint32_t maxImageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
+
   };
   static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
 
-  struct SwapchainCounterCreateInfoEXT
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
   {
-    SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = SurfaceCounterFlagsEXT() )
-      : surfaceCounters( surfaceCounters_ )
-    {
-    }
+    using Type = SurfaceCapabilities2EXT;
+  };
 
-    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
-    }
+  struct SurfaceCapabilitiesKHR
+  {
 
-    SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs )
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
       return *this;
     }
-    SwapchainCounterCreateInfoEXT& setPNext( const void* pNext_ )
+
+    SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilitiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+    }
+
+    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t minImageCount = {};
+    uint32_t maxImageCount = {};
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+    uint32_t maxImageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+
+  };
+  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceCapabilities2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}) VULKAN_HPP_NOEXCEPT
+    : surfaceCapabilities( surfaceCapabilities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilities2KHR ) );
+      return *this;
+    }
+
+
+    operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
+    }
+
+    operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilities2KHR const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCapabilities == rhs.surfaceCapabilities );
+    }
+
+    bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
+
+  };
+  static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
+  {
+    using Type = SurfaceCapabilities2KHR;
+  };
+
+  struct SurfaceFormatKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT
+    : format( format_ ), colorSpace( colorSpace_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFormatKHR ) );
+      return *this;
+    }
+
+
+    operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
+    }
+
+    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFormatKHR const& ) const = default;
+#else
+    bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( format == rhs.format )
+          && ( colorSpace == rhs.colorSpace );
+    }
+
+    bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+
+  };
+  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceFormat2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}) VULKAN_HPP_NOEXCEPT
+    : surfaceFormat( surfaceFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFormat2KHR ) );
+      return *this;
+    }
+
+
+    operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
+    }
+
+    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFormat2KHR const& ) const = default;
+#else
+    bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceFormat == rhs.surfaceFormat );
+    }
+
+    bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+
+  };
+  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
+  {
+    using Type = SurfaceFormat2KHR;
+  };
+
+  struct PhysicalDeviceToolPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& name_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = {}) VULKAN_HPP_NOEXCEPT
+    : name( name_ ), version( version_ ), purposes( purposes_ ), description( description_ ), layer( layer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceToolPropertiesEXT & operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceToolPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceToolPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( name == rhs.name )
+          && ( version == rhs.version )
+          && ( purposes == rhs.purposes )
+          && ( description == rhs.description )
+          && ( layer == rhs.layer );
+    }
+
+    bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
+    VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceToolPropertiesEXT>
+  {
+    using Type = PhysicalDeviceToolPropertiesEXT;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch> class UniqueHandleTraits<Device, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+  using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class PhysicalDevice
+  {
+  public:
+    using CType = VkPhysicalDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice( physicalDevice )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = physicalDevice;
+      return *this;
+    }
+#endif
+
+    PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice const& ) const = default;
+#else
+    bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == rhs.m_physicalDevice;
+    }
+
+    bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != rhs.m_physicalDevice;
+    }
+
+    bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice < rhs.m_physicalDevice;
+    }
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(  uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT  ) const;
+    template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR(  uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT  ) const;
+    template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PerformanceCounterKHRAllocator, typename B2 = PerformanceCounterDescriptionKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( uint32_t* pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PhysicalDeviceFragmentShadingRateKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR,Allocator>>::type getFragmentShadingRatesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PhysicalDeviceFragmentShadingRateKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR,Allocator>>::type getFragmentShadingRatesKHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = QueueFamilyPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename StructureChain, typename Allocator  = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename StructureChain, typename Allocator  = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename StructureChain, typename Allocator  = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename StructureChain, typename Allocator  = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageFormatPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageFormatProperties2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageFormatProperties2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPhysicalDevice m_physicalDevice;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePhysicalDevice>
+  {
+    using type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DeviceGroupDeviceCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {}) VULKAN_HPP_NOEXCEPT
+    : physicalDeviceCount( physicalDeviceCount_ ), pPhysicalDevices( pPhysicalDevices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ )
+    : physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupDeviceCreateInfo ) );
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    SwapchainCounterCreateInfoEXT& setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ )
+    DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      surfaceCounters = surfaceCounters_;
+      physicalDeviceCount = physicalDeviceCount_;
       return *this;
     }
 
-    operator VkSwapchainCounterCreateInfoEXT const&() const
+    DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>(this);
+      pPhysicalDevices = pPhysicalDevices_;
+      return *this;
     }
 
-    operator VkSwapchainCounterCreateInfoEXT &()
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupDeviceCreateInfo & setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>(this);
+      physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
+      pPhysicalDevices = physicalDevices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
     }
 
-    bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const
+    operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( surfaceCounters == rhs.surfaceCounters );
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( pPhysicalDevices == rhs.pPhysicalDevices );
     }
 
-    bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const
+    bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
+
 
   public:
-    const void* pNext = nullptr;
-    SurfaceCounterFlagsEXT surfaceCounters;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
+    const void* pNext = {};
+    uint32_t physicalDeviceCount = {};
+    const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {};
+
   };
-  static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
-  enum class DisplayPowerStateEXT
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
   {
-    eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
-    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
-    eOn = VK_DISPLAY_POWER_STATE_ON_EXT
+    using Type = DeviceGroupDeviceCreateInfo;
   };
-
-  struct DisplayPowerInfoEXT
-  {
-    DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff )
-      : powerState( powerState_ )
-    {
-    }
-
-    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) );
-    }
-
-    DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) );
-      return *this;
-    }
-    DisplayPowerInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DisplayPowerInfoEXT& setPowerState( DisplayPowerStateEXT powerState_ )
-    {
-      powerState = powerState_;
-      return *this;
-    }
-
-    operator VkDisplayPowerInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDisplayPowerInfoEXT*>(this);
-    }
-
-    operator VkDisplayPowerInfoEXT &()
-    {
-      return *reinterpret_cast<VkDisplayPowerInfoEXT*>(this);
-    }
-
-    bool operator==( DisplayPowerInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( powerState == rhs.powerState );
-    }
-
-    bool operator!=( DisplayPowerInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayPowerInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    DisplayPowerStateEXT powerState;
-  };
-  static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class DeviceEventTypeEXT
-  {
-    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
-  };
-
-  struct DeviceEventInfoEXT
-  {
-    DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug )
-      : deviceEvent( deviceEvent_ )
-    {
-    }
-
-    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) );
-    }
-
-    DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) );
-      return *this;
-    }
-    DeviceEventInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceEventInfoEXT& setDeviceEvent( DeviceEventTypeEXT deviceEvent_ )
-    {
-      deviceEvent = deviceEvent_;
-      return *this;
-    }
-
-    operator VkDeviceEventInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDeviceEventInfoEXT*>(this);
-    }
-
-    operator VkDeviceEventInfoEXT &()
-    {
-      return *reinterpret_cast<VkDeviceEventInfoEXT*>(this);
-    }
-
-    bool operator==( DeviceEventInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceEvent == rhs.deviceEvent );
-    }
-
-    bool operator!=( DeviceEventInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceEventInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceEventTypeEXT deviceEvent;
-  };
-  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class DisplayEventTypeEXT
-  {
-    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
-  };
-
-  struct DisplayEventInfoEXT
-  {
-    DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut )
-      : displayEvent( displayEvent_ )
-    {
-    }
-
-    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) );
-    }
-
-    DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) );
-      return *this;
-    }
-    DisplayEventInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DisplayEventInfoEXT& setDisplayEvent( DisplayEventTypeEXT displayEvent_ )
-    {
-      displayEvent = displayEvent_;
-      return *this;
-    }
-
-    operator VkDisplayEventInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDisplayEventInfoEXT*>(this);
-    }
-
-    operator VkDisplayEventInfoEXT &()
-    {
-      return *reinterpret_cast<VkDisplayEventInfoEXT*>(this);
-    }
-
-    bool operator==( DisplayEventInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( displayEvent == rhs.displayEvent );
-    }
-
-    bool operator!=( DisplayEventInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDisplayEventInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    DisplayEventTypeEXT displayEvent;
-  };
-  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class PeerMemoryFeatureFlagBits
-  {
-    eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
-    eCopySrcKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
-    eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
-    eCopyDstKHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
-    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
-    eGenericSrcKHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
-    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT,
-    eGenericDstKHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
-  };
-
-  using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits, VkPeerMemoryFeatureFlags>;
-
-  VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 )
-  {
-    return PeerMemoryFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits )
-  {
-    return ~( PeerMemoryFeatureFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst)
-    };
-  };
-
-  using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
-
-  enum class MemoryAllocateFlagBits
-  {
-    eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
-    eDeviceMaskKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT
-  };
-
-  using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits, VkMemoryAllocateFlags>;
-
-  VULKAN_HPP_INLINE MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 )
-  {
-    return MemoryAllocateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits )
-  {
-    return ~( MemoryAllocateFlags( bits ) );
-  }
-
-  template <> struct FlagTraits<MemoryAllocateFlagBits>
-  {
-    enum
-    {
-      allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask)
-    };
-  };
-
-  using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
-
-  struct MemoryAllocateFlagsInfo
-  {
-    MemoryAllocateFlagsInfo( MemoryAllocateFlags flags_ = MemoryAllocateFlags(),
-                             uint32_t deviceMask_ = 0 )
-      : flags( flags_ )
-      , deviceMask( deviceMask_ )
-    {
-    }
-
-    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) );
-    }
-
-    MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) );
-      return *this;
-    }
-    MemoryAllocateFlagsInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryAllocateFlagsInfo& setFlags( MemoryAllocateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    MemoryAllocateFlagsInfo& setDeviceMask( uint32_t deviceMask_ )
-    {
-      deviceMask = deviceMask_;
-      return *this;
-    }
-
-    operator VkMemoryAllocateFlagsInfo const&() const
-    {
-      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>(this);
-    }
-
-    operator VkMemoryAllocateFlagsInfo &()
-    {
-      return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>(this);
-    }
-
-    bool operator==( MemoryAllocateFlagsInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( deviceMask == rhs.deviceMask );
-    }
-
-    bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
-
-  public:
-    const void* pNext = nullptr;
-    MemoryAllocateFlags flags;
-    uint32_t deviceMask;
-  };
-  static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
-
-  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
-
-  enum class DeviceGroupPresentModeFlagBitsKHR
-  {
-    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
-    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
-    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
-    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
-  };
-
-  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
-
-  VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 )
-  {
-    return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits )
-  {
-    return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
-  }
-
-  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
-  {
-    enum
-    {
-      allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
-    };
-  };
-
-  struct DeviceGroupPresentCapabilitiesKHR
-  {
-    operator VkDeviceGroupPresentCapabilitiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>(this);
-    }
-
-    operator VkDeviceGroupPresentCapabilitiesKHR &()
-    {
-      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>(this);
-    }
-
-    bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 )
-          && ( modes == rhs.modes );
-    }
-
-    bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE];
-    DeviceGroupPresentModeFlagsKHR modes;
-  };
-  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
 
   struct DeviceGroupPresentInfoKHR
   {
-    DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0,
-                               const uint32_t* pDeviceMasks_ = nullptr,
-                               DeviceGroupPresentModeFlagBitsKHR mode_ = DeviceGroupPresentModeFlagBitsKHR::eLocal )
-      : swapchainCount( swapchainCount_ )
-      , pDeviceMasks( pDeviceMasks_ )
-      , mode( mode_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(uint32_t swapchainCount_ = {}, const uint32_t* pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal) VULKAN_HPP_NOEXCEPT
+    : swapchainCount( swapchainCount_ ), pDeviceMasks( pDeviceMasks_ ), mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal )
+    : swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs )
+    DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
       return *this;
     }
-    DeviceGroupPresentInfoKHR& setPNext( const void* pNext_ )
+
+    DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupPresentInfoKHR ) );
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DeviceGroupPresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+    DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
     {
       swapchainCount = swapchainCount_;
       return *this;
     }
 
-    DeviceGroupPresentInfoKHR& setPDeviceMasks( const uint32_t* pDeviceMasks_ )
+    DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
     {
       pDeviceMasks = pDeviceMasks_;
       return *this;
     }
 
-    DeviceGroupPresentInfoKHR& setMode( DeviceGroupPresentModeFlagBitsKHR mode_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
+      pDeviceMasks = deviceMasks_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
     {
       mode = mode_;
       return *this;
     }
 
-    operator VkDeviceGroupPresentInfoKHR const&() const
+
+    operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>(this);
+      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
     }
 
-    operator VkDeviceGroupPresentInfoKHR &()
+    operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>(this);
+      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
     }
 
-    bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default;
+#else
+    bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -34180,1620 +55054,18074 @@
           && ( mode == rhs.mode );
     }
 
-    bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const
+    bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+
 
   public:
-    const void* pNext = nullptr;
-    uint32_t swapchainCount;
-    const uint32_t* pDeviceMasks;
-    DeviceGroupPresentModeFlagBitsKHR mode;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+    const void* pNext = {};
+    uint32_t swapchainCount = {};
+    const uint32_t* pDeviceMasks = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
+
   };
   static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
-  struct DeviceGroupSwapchainCreateInfoKHR
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
   {
-    DeviceGroupSwapchainCreateInfoKHR( DeviceGroupPresentModeFlagsKHR modes_ = DeviceGroupPresentModeFlagsKHR() )
-      : modes( modes_ )
+    using Type = DeviceGroupPresentInfoKHR;
+  };
+
+  struct DeviceGroupRenderPassBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceMask( deviceMask_ ), deviceRenderAreaCount( deviceRenderAreaCount_ ), pDeviceRenderAreas( pDeviceRenderAreas_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
+    : deviceMask( deviceMask_ ), deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) ), pDeviceRenderAreas( deviceRenderAreas_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
+    DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
       return *this;
     }
-    DeviceGroupSwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+
+    DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) );
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DeviceGroupSwapchainCreateInfoKHR& setModes( DeviceGroupPresentModeFlagsKHR modes_ )
+    DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceRenderAreaCount = deviceRenderAreaCount_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceRenderAreas = pDeviceRenderAreas_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
+      pDeviceRenderAreas = deviceRenderAreas_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
+    }
+
+    operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask )
+          && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
+          && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+    }
+
+    bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
+    const void* pNext = {};
+    uint32_t deviceMask = {};
+    uint32_t deviceRenderAreaCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {};
+
+  };
+  static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
+  {
+    using Type = DeviceGroupRenderPassBeginInfo;
+  };
+  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
+
+  struct DeviceGroupSubmitInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t* pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t* pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t* pSignalSemaphoreDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
+    : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ), commandBufferCount( commandBufferCount_ ), pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {} )
+    : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) ), pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ), commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) ), pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) ), pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupSubmitInfo ) );
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
+      pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
+      pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
+      pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
+    }
+
+    operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupSubmitInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+    }
+
+    bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+    const void* pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const uint32_t* pWaitSemaphoreDeviceIndices = {};
+    uint32_t commandBufferCount = {};
+    const uint32_t* pCommandBufferDeviceMasks = {};
+    uint32_t signalSemaphoreCount = {};
+    const uint32_t* pSignalSemaphoreDeviceIndices = {};
+
+  };
+  static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
+  {
+    using Type = DeviceGroupSubmitInfo;
+  };
+  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
+
+  struct DeviceGroupSwapchainCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
+    : modes( modes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) );
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
     {
       modes = modes_;
       return *this;
     }
 
-    operator VkDeviceGroupSwapchainCreateInfoKHR const&() const
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>(this);
+      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
     }
 
-    operator VkDeviceGroupSwapchainCreateInfoKHR &()
+    operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>(this);
+      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
     }
 
-    bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( modes == rhs.modes );
     }
 
-    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const
+    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+
 
   public:
-    const void* pNext = nullptr;
-    DeviceGroupPresentModeFlagsKHR modes;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+
   };
   static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
-  enum class SwapchainCreateFlagBitsKHR
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
   {
-    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
-    eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR
+    using Type = DeviceGroupSwapchainCreateInfoKHR;
   };
 
-  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
-
-  VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
+  struct DeviceMemoryOverallocationCreateInfoAMD
   {
-    return SwapchainCreateFlagsKHR( bit0 ) | bit1;
-  }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
 
-  VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits )
-  {
-    return ~( SwapchainCreateFlagsKHR( bits ) );
-  }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault) VULKAN_HPP_NOEXCEPT
+    : overallocationBehavior( overallocationBehavior_ )
+    {}
 
-  template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
-  {
-    enum
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected)
-    };
-  };
-
-  struct SwapchainCreateInfoKHR
-  {
-    SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(),
-                            SurfaceKHR surface_ = SurfaceKHR(),
-                            uint32_t minImageCount_ = 0,
-                            Format imageFormat_ = Format::eUndefined,
-                            ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear,
-                            Extent2D imageExtent_ = Extent2D(),
-                            uint32_t imageArrayLayers_ = 0,
-                            ImageUsageFlags imageUsage_ = ImageUsageFlags(),
-                            SharingMode imageSharingMode_ = SharingMode::eExclusive,
-                            uint32_t queueFamilyIndexCount_ = 0,
-                            const uint32_t* pQueueFamilyIndices_ = nullptr,
-                            SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity,
-                            CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque,
-                            PresentModeKHR presentMode_ = PresentModeKHR::eImmediate,
-                            Bool32 clipped_ = 0,
-                            SwapchainKHR oldSwapchain_ = SwapchainKHR() )
-      : flags( flags_ )
-      , surface( surface_ )
-      , minImageCount( minImageCount_ )
-      , imageFormat( imageFormat_ )
-      , imageColorSpace( imageColorSpace_ )
-      , imageExtent( imageExtent_ )
-      , imageArrayLayers( imageArrayLayers_ )
-      , imageUsage( imageUsage_ )
-      , imageSharingMode( imageSharingMode_ )
-      , queueFamilyIndexCount( queueFamilyIndexCount_ )
-      , pQueueFamilyIndices( pQueueFamilyIndices_ )
-      , preTransform( preTransform_ )
-      , compositeAlpha( compositeAlpha_ )
-      , presentMode( presentMode_ )
-      , clipped( clipped_ )
-      , oldSwapchain( oldSwapchain_ )
-    {
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
+    DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) );
-    }
-
-    SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
       return *this;
     }
-    SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+
+    DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) );
+      return *this;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
+    DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
+    {
+      overallocationBehavior = overallocationBehavior_;
+      return *this;
+    }
+
+
+    operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+    operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default;
+#else
+    bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( overallocationBehavior == rhs.overallocationBehavior );
+    }
+
+    bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
+
+  };
+  static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
+  {
+    using Type = DeviceMemoryOverallocationCreateInfoAMD;
+  };
+
+  struct DeviceMemoryReportCallbackDataEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, uint64_t memoryObjectId_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), type( type_ ), memoryObjectId( memoryObjectId_ ), size( size_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), heapIndex( heapIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const *>( &rhs );
+      return *this;
+    }
+
+    DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceMemoryReportCallbackDataEXT ) );
+      return *this;
+    }
+
+
+    operator VkDeviceMemoryReportCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT*>( this );
+    }
+
+    operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceMemoryReportCallbackDataEXT const& ) const = default;
+#else
+    bool operator==( DeviceMemoryReportCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( type == rhs.type )
+          && ( memoryObjectId == rhs.memoryObjectId )
+          && ( size == rhs.size )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( heapIndex == rhs.heapIndex );
+    }
+
+    bool operator!=( DeviceMemoryReportCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
+    uint64_t memoryObjectId = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    uint32_t heapIndex = {};
+
+  };
+  static_assert( sizeof( DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceMemoryReportCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
+  {
+    using Type = DeviceMemoryReportCallbackDataEXT;
+  };
+
+  struct DevicePrivateDataCreateInfoEXT
+  {
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT(uint32_t privateDataSlotRequestCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : privateDataSlotRequestCount( privateDataSlotRequestCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DevicePrivateDataCreateInfoEXT & operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DevicePrivateDataCreateInfoEXT & operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DevicePrivateDataCreateInfoEXT ) );
+      return *this;
+    }
+
+    DevicePrivateDataCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DevicePrivateDataCreateInfoEXT & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateDataSlotRequestCount = privateDataSlotRequestCount_;
+      return *this;
+    }
+
+
+    operator VkDevicePrivateDataCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDevicePrivateDataCreateInfoEXT*>( this );
+    }
+
+    operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDevicePrivateDataCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DevicePrivateDataCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
+    }
+
+    bool operator!=( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfoEXT;
+    const void* pNext = {};
+    uint32_t privateDataSlotRequestCount = {};
+
+  };
+  static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DevicePrivateDataCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfoEXT>
+  {
+    using Type = DevicePrivateDataCreateInfoEXT;
+  };
+
+  struct DeviceQueueGlobalPriorityCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow) VULKAN_HPP_NOEXCEPT
+    : globalPriority( globalPriority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) );
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      globalPriority = globalPriority_;
+      return *this;
+    }
+
+
+    operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+    }
+
+    operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( globalPriority == rhs.globalPriority );
+    }
+
+    bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
+
+  };
+  static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT>
+  {
+    using Type = DeviceQueueGlobalPriorityCreateInfoEXT;
+  };
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+  struct DirectFBSurfaceCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB* dfb_ = {}, IDirectFBSurface* surface_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), dfb( dfb_ ), surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DirectFBSurfaceCreateInfoEXT ) );
+      return *this;
+    }
+
+    DirectFBSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
+    DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB* dfb_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dfb = dfb_;
+      return *this;
+    }
+
+    DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface* surface_ ) VULKAN_HPP_NOEXCEPT
     {
       surface = surface_;
       return *this;
     }
 
-    SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
+
+    operator VkDirectFBSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      minImageCount = minImageCount_;
-      return *this;
+      return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( this );
     }
 
-    SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
+    operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      imageFormat = imageFormat_;
-      return *this;
+      return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT*>( this );
     }
 
-    SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
-    {
-      imageColorSpace = imageColorSpace_;
-      return *this;
-    }
 
-    SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
-    {
-      imageExtent = imageExtent_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
-    {
-      imageArrayLayers = imageArrayLayers_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
-    {
-      imageUsage = imageUsage_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
-    {
-      imageSharingMode = imageSharingMode_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
-    {
-      queueFamilyIndexCount = queueFamilyIndexCount_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
-    {
-      pQueueFamilyIndices = pQueueFamilyIndices_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
-    {
-      preTransform = preTransform_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
-    {
-      compositeAlpha = compositeAlpha_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
-    {
-      presentMode = presentMode_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
-    {
-      clipped = clipped_;
-      return *this;
-    }
-
-    SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
-    {
-      oldSwapchain = oldSwapchain_;
-      return *this;
-    }
-
-    operator VkSwapchainCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
-    }
-
-    operator VkSwapchainCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkSwapchainCreateInfoKHR*>(this);
-    }
-
-    bool operator==( SwapchainCreateInfoKHR const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DirectFBSurfaceCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( flags == rhs.flags )
-          && ( surface == rhs.surface )
-          && ( minImageCount == rhs.minImageCount )
-          && ( imageFormat == rhs.imageFormat )
-          && ( imageColorSpace == rhs.imageColorSpace )
-          && ( imageExtent == rhs.imageExtent )
-          && ( imageArrayLayers == rhs.imageArrayLayers )
-          && ( imageUsage == rhs.imageUsage )
-          && ( imageSharingMode == rhs.imageSharingMode )
-          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
-          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
-          && ( preTransform == rhs.preTransform )
-          && ( compositeAlpha == rhs.compositeAlpha )
-          && ( presentMode == rhs.presentMode )
-          && ( clipped == rhs.clipped )
-          && ( oldSwapchain == rhs.oldSwapchain );
+          && ( dfb == rhs.dfb )
+          && ( surface == rhs.surface );
     }
 
-    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
+    bool operator!=( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+
 
   public:
-    const void* pNext = nullptr;
-    SwapchainCreateFlagsKHR flags;
-    SurfaceKHR surface;
-    uint32_t minImageCount;
-    Format imageFormat;
-    ColorSpaceKHR imageColorSpace;
-    Extent2D imageExtent;
-    uint32_t imageArrayLayers;
-    ImageUsageFlags imageUsage;
-    SharingMode imageSharingMode;
-    uint32_t queueFamilyIndexCount;
-    const uint32_t* pQueueFamilyIndices;
-    SurfaceTransformFlagBitsKHR preTransform;
-    CompositeAlphaFlagBitsKHR compositeAlpha;
-    PresentModeKHR presentMode;
-    Bool32 clipped;
-    SwapchainKHR oldSwapchain;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {};
+    IDirectFB* dfb = {};
+    IDirectFBSurface* surface = {};
+
   };
-  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DirectFBSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
-  enum class ViewportCoordinateSwizzleNV
+  template <>
+  struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
   {
-    ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
-    eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
-    ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
-    eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
-    ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
-    eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
-    ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
-    eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
+    using Type = DirectFBSurfaceCreateInfoEXT;
   };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
 
-  struct ViewportSwizzleNV
+  struct DispatchIndirectCommand
   {
-    ViewportSwizzleNV( ViewportCoordinateSwizzleNV x_ = ViewportCoordinateSwizzleNV::ePositiveX,
-                       ViewportCoordinateSwizzleNV y_ = ViewportCoordinateSwizzleNV::ePositiveX,
-                       ViewportCoordinateSwizzleNV z_ = ViewportCoordinateSwizzleNV::ePositiveX,
-                       ViewportCoordinateSwizzleNV w_ = ViewportCoordinateSwizzleNV::ePositiveX )
-      : x( x_ )
-      , y( y_ )
-      , z( z_ )
-      , w( w_ )
-    {
-    }
 
-    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) );
-    }
 
-    ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
       return *this;
     }
-    ViewportSwizzleNV& setX( ViewportCoordinateSwizzleNV x_ )
+
+    DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DispatchIndirectCommand ) );
+      return *this;
+    }
+
+    DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
     {
       x = x_;
       return *this;
     }
 
-    ViewportSwizzleNV& setY( ViewportCoordinateSwizzleNV y_ )
+    DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
     {
       y = y_;
       return *this;
     }
 
-    ViewportSwizzleNV& setZ( ViewportCoordinateSwizzleNV z_ )
+    DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
     {
       z = z_;
       return *this;
     }
 
-    ViewportSwizzleNV& setW( ViewportCoordinateSwizzleNV w_ )
+
+    operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
     {
-      w = w_;
-      return *this;
+      return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
     }
 
-    operator VkViewportSwizzleNV const&() const
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkViewportSwizzleNV*>(this);
+      return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
     }
 
-    operator VkViewportSwizzleNV &()
-    {
-      return *reinterpret_cast<VkViewportSwizzleNV*>(this);
-    }
 
-    bool operator==( ViewportSwizzleNV const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DispatchIndirectCommand const& ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( x == rhs.x )
           && ( y == rhs.y )
-          && ( z == rhs.z )
-          && ( w == rhs.w );
+          && ( z == rhs.z );
     }
 
-    bool operator!=( ViewportSwizzleNV const& rhs ) const
+    bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    ViewportCoordinateSwizzleNV x;
-    ViewportCoordinateSwizzleNV y;
-    ViewportCoordinateSwizzleNV z;
-    ViewportCoordinateSwizzleNV w;
+
+
+  public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+
   };
-  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
 
-  struct PipelineViewportSwizzleStateCreateInfoNV
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD
   {
-    PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateFlagsNV flags_ = PipelineViewportSwizzleStateCreateFlagsNV(),
-                                              uint32_t viewportCount_ = 0,
-                                              const ViewportSwizzleNV* pViewportSwizzles_ = nullptr )
-      : flags( flags_ )
-      , viewportCount( viewportCount_ )
-      , pViewportSwizzles( pViewportSwizzles_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
 
-    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}) VULKAN_HPP_NOEXCEPT
+    : localDimmingSupport( localDimmingSupport_ )
+    {}
 
-    PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
       return *this;
     }
-    PipelineViewportSwizzleStateCreateInfoNV& setPNext( const void* pNext_ )
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) );
+      return *this;
+    }
+
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const& ) const = default;
+#else
+    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingSupport == rhs.localDimmingSupport );
+    }
+
+    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
+
+  };
+  static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
+  {
+    using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
+  };
+
+  struct DisplayPresentInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}) VULKAN_HPP_NOEXCEPT
+    : srcRect( srcRect_ ), dstRect( dstRect_ ), persistent( persistent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplayPresentInfoKHR ) );
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineViewportSwizzleStateCreateInfoNV& setFlags( PipelineViewportSwizzleStateCreateFlagsNV flags_ )
+    DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      srcRect = srcRect_;
       return *this;
     }
 
-    PipelineViewportSwizzleStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ )
+    DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
     {
-      viewportCount = viewportCount_;
+      dstRect = dstRect_;
       return *this;
     }
 
-    PipelineViewportSwizzleStateCreateInfoNV& setPViewportSwizzles( const ViewportSwizzleNV* pViewportSwizzles_ )
+    DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
     {
-      pViewportSwizzles = pViewportSwizzles_;
+      persistent = persistent_;
       return *this;
     }
 
-    operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const
+
+    operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>(this);
+      return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
     }
 
-    operator VkPipelineViewportSwizzleStateCreateInfoNV &()
+    operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>(this);
+      return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
     }
 
-    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPresentInfoKHR const& ) const = default;
+#else
+    bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( viewportCount == rhs.viewportCount )
-          && ( pViewportSwizzles == rhs.pViewportSwizzles );
+          && ( srcRect == rhs.srcRect )
+          && ( dstRect == rhs.dstRect )
+          && ( persistent == rhs.persistent );
     }
 
-    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const
+    bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+
 
   public:
-    const void* pNext = nullptr;
-    PipelineViewportSwizzleStateCreateFlagsNV flags;
-    uint32_t viewportCount;
-    const ViewportSwizzleNV* pViewportSwizzles;
-  };
-  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
+    VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
 
-  enum class DiscardRectangleModeEXT
+  };
+  static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
   {
-    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
-    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+    using Type = DisplayPresentInfoKHR;
   };
 
-  struct PipelineDiscardRectangleStateCreateInfoEXT
+  struct DisplaySurfaceCreateInfoKHR
   {
-    PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateFlagsEXT flags_ = PipelineDiscardRectangleStateCreateFlagsEXT(),
-                                                DiscardRectangleModeEXT discardRectangleMode_ = DiscardRectangleModeEXT::eInclusive,
-                                                uint32_t discardRectangleCount_ = 0,
-                                                const Rect2D* pDiscardRectangles_ = nullptr )
-      : flags( flags_ )
-      , discardRectangleMode( discardRectangleMode_ )
-      , discardRectangleCount( discardRectangleCount_ )
-      , pDiscardRectangles( pDiscardRectangles_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;
 
-    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), displayMode( displayMode_ ), planeIndex( planeIndex_ ), planeStackIndex( planeStackIndex_ ), transform( transform_ ), globalAlpha( globalAlpha_ ), alphaMode( alphaMode_ ), imageExtent( imageExtent_ )
+    {}
 
-    PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
+    VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
       return *this;
     }
-    PipelineDiscardRectangleStateCreateInfoEXT& setPNext( const void* pNext_ )
+
+    DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineDiscardRectangleStateCreateInfoEXT& setFlags( PipelineDiscardRectangleStateCreateFlagsEXT flags_ )
+    DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleMode( DiscardRectangleModeEXT discardRectangleMode_ )
+    DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
     {
-      discardRectangleMode = discardRectangleMode_;
+      displayMode = displayMode_;
       return *this;
     }
 
-    PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleCount( uint32_t discardRectangleCount_ )
+    DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      discardRectangleCount = discardRectangleCount_;
+      planeIndex = planeIndex_;
       return *this;
     }
 
-    PipelineDiscardRectangleStateCreateInfoEXT& setPDiscardRectangles( const Rect2D* pDiscardRectangles_ )
+    DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      pDiscardRectangles = pDiscardRectangles_;
+      planeStackIndex = planeStackIndex_;
       return *this;
     }
 
-    operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const
+    DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>(this);
+      transform = transform_;
+      return *this;
     }
 
-    operator VkPipelineDiscardRectangleStateCreateInfoEXT &()
+    DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>(this);
+      globalAlpha = globalAlpha_;
+      return *this;
     }
 
-    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const
+    DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaMode = alphaMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+
+    operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplaySurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( flags == rhs.flags )
-          && ( discardRectangleMode == rhs.discardRectangleMode )
-          && ( discardRectangleCount == rhs.discardRectangleCount )
-          && ( pDiscardRectangles == rhs.pDiscardRectangles );
+          && ( displayMode == rhs.displayMode )
+          && ( planeIndex == rhs.planeIndex )
+          && ( planeStackIndex == rhs.planeStackIndex )
+          && ( transform == rhs.transform )
+          && ( globalAlpha == rhs.globalAlpha )
+          && ( alphaMode == rhs.alphaMode )
+          && ( imageExtent == rhs.imageExtent );
     }
 
-    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const
+    bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+
 
   public:
-    const void* pNext = nullptr;
-    PipelineDiscardRectangleStateCreateFlagsEXT flags;
-    DiscardRectangleModeEXT discardRectangleMode;
-    uint32_t discardRectangleCount;
-    const Rect2D* pDiscardRectangles;
-  };
-  static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+    uint32_t planeIndex = {};
+    uint32_t planeStackIndex = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    float globalAlpha = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
 
-  enum class SubpassDescriptionFlagBits
+  };
+  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
   {
-    ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
-    ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
+    using Type = DisplaySurfaceCreateInfoKHR;
   };
 
-  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
-
-  VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
+  struct DrawIndexedIndirectCommand
   {
-    return SubpassDescriptionFlags( bit0 ) | bit1;
-  }
 
-  VULKAN_HPP_INLINE SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits )
-  {
-    return ~( SubpassDescriptionFlags( bits ) );
-  }
 
-  template <> struct FlagTraits<SubpassDescriptionFlagBits>
-  {
-    enum
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
+    : indexCount( indexCount_ ), instanceCount( instanceCount_ ), firstIndex( firstIndex_ ), vertexOffset( vertexOffset_ ), firstInstance( firstInstance_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX)
-    };
-  };
-
-  struct SubpassDescription
-  {
-    SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(),
-                        PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics,
-                        uint32_t inputAttachmentCount_ = 0,
-                        const AttachmentReference* pInputAttachments_ = nullptr,
-                        uint32_t colorAttachmentCount_ = 0,
-                        const AttachmentReference* pColorAttachments_ = nullptr,
-                        const AttachmentReference* pResolveAttachments_ = nullptr,
-                        const AttachmentReference* pDepthStencilAttachment_ = nullptr,
-                        uint32_t preserveAttachmentCount_ = 0,
-                        const uint32_t* pPreserveAttachments_ = nullptr )
-      : flags( flags_ )
-      , pipelineBindPoint( pipelineBindPoint_ )
-      , inputAttachmentCount( inputAttachmentCount_ )
-      , pInputAttachments( pInputAttachments_ )
-      , colorAttachmentCount( colorAttachmentCount_ )
-      , pColorAttachments( pColorAttachments_ )
-      , pResolveAttachments( pResolveAttachments_ )
-      , pDepthStencilAttachment( pDepthStencilAttachment_ )
-      , preserveAttachmentCount( preserveAttachmentCount_ )
-      , pPreserveAttachments( pPreserveAttachments_ )
-    {
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    SubpassDescription( VkSubpassDescription const & rhs )
+    DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SubpassDescription ) );
-    }
-
-    SubpassDescription& operator=( VkSubpassDescription const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassDescription ) );
-      return *this;
-    }
-    SubpassDescription& setFlags( SubpassDescriptionFlags flags_ )
-    {
-      flags = flags_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
       return *this;
     }
 
-    SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+    DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      pipelineBindPoint = pipelineBindPoint_;
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DrawIndexedIndirectCommand ) );
       return *this;
     }
 
-    SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ )
+    DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      inputAttachmentCount = inputAttachmentCount_;
+      indexCount = indexCount_;
       return *this;
     }
 
-    SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ )
+    DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      pInputAttachments = pInputAttachments_;
+      instanceCount = instanceCount_;
       return *this;
     }
 
-    SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ )
+    DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      colorAttachmentCount = colorAttachmentCount_;
+      firstIndex = firstIndex_;
       return *this;
     }
 
-    SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ )
+    DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
     {
-      pColorAttachments = pColorAttachments_;
+      vertexOffset = vertexOffset_;
       return *this;
     }
 
-    SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ )
+    DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
     {
-      pResolveAttachments = pResolveAttachments_;
+      firstInstance = firstInstance_;
       return *this;
     }
 
-    SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ )
+
+    operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
     {
-      pDepthStencilAttachment = pDepthStencilAttachment_;
-      return *this;
+      return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
     }
 
-    SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ )
+    operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
     {
-      preserveAttachmentCount = preserveAttachmentCount_;
-      return *this;
+      return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
     }
 
-    SubpassDescription& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ )
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DrawIndexedIndirectCommand const& ) const = default;
+#else
+    bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      pPreserveAttachments = pPreserveAttachments_;
-      return *this;
+      return ( indexCount == rhs.indexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstIndex == rhs.firstIndex )
+          && ( vertexOffset == rhs.vertexOffset )
+          && ( firstInstance == rhs.firstInstance );
     }
 
-    operator VkSubpassDescription const&() const
-    {
-      return *reinterpret_cast<const VkSubpassDescription*>(this);
-    }
-
-    operator VkSubpassDescription &()
-    {
-      return *reinterpret_cast<VkSubpassDescription*>(this);
-    }
-
-    bool operator==( SubpassDescription const& rhs ) const
-    {
-      return ( flags == rhs.flags )
-          && ( pipelineBindPoint == rhs.pipelineBindPoint )
-          && ( inputAttachmentCount == rhs.inputAttachmentCount )
-          && ( pInputAttachments == rhs.pInputAttachments )
-          && ( colorAttachmentCount == rhs.colorAttachmentCount )
-          && ( pColorAttachments == rhs.pColorAttachments )
-          && ( pResolveAttachments == rhs.pResolveAttachments )
-          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
-          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
-          && ( pPreserveAttachments == rhs.pPreserveAttachments );
-    }
-
-    bool operator!=( SubpassDescription const& rhs ) const
+    bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    SubpassDescriptionFlags flags;
-    PipelineBindPoint pipelineBindPoint;
-    uint32_t inputAttachmentCount;
-    const AttachmentReference* pInputAttachments;
-    uint32_t colorAttachmentCount;
-    const AttachmentReference* pColorAttachments;
-    const AttachmentReference* pResolveAttachments;
-    const AttachmentReference* pDepthStencilAttachment;
-    uint32_t preserveAttachmentCount;
-    const uint32_t* pPreserveAttachments;
+
+
+  public:
+    uint32_t indexCount = {};
+    uint32_t instanceCount = {};
+    uint32_t firstIndex = {};
+    int32_t vertexOffset = {};
+    uint32_t firstInstance = {};
+
   };
-  static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+  static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
 
-  struct RenderPassCreateInfo
+  struct DrawIndirectCommand
   {
-    RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(),
-                          uint32_t attachmentCount_ = 0,
-                          const AttachmentDescription* pAttachments_ = nullptr,
-                          uint32_t subpassCount_ = 0,
-                          const SubpassDescription* pSubpasses_ = nullptr,
-                          uint32_t dependencyCount_ = 0,
-                          const SubpassDependency* pDependencies_ = nullptr )
-      : flags( flags_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
-      , subpassCount( subpassCount_ )
-      , pSubpasses( pSubpasses_ )
-      , dependencyCount( dependencyCount_ )
-      , pDependencies( pDependencies_ )
-    {
-    }
 
-    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) );
-    }
 
-    RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
+    : vertexCount( vertexCount_ ), instanceCount( instanceCount_ ), firstVertex( firstVertex_ ), firstInstance( firstInstance_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
       return *this;
     }
-    RenderPassCreateInfo& setPNext( const void* pNext_ )
+
+    DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DrawIndirectCommand ) );
+      return *this;
+    }
+
+    DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+
+
+    operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
+    }
+
+    operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawIndirectCommand*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DrawIndirectCommand const& ) const = default;
+#else
+    bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( vertexCount == rhs.vertexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstVertex == rhs.firstVertex )
+          && ( firstInstance == rhs.firstInstance );
+    }
+
+    bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t vertexCount = {};
+    uint32_t instanceCount = {};
+    uint32_t firstVertex = {};
+    uint32_t firstInstance = {};
+
+  };
+  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrawMeshTasksIndirectCommandNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT
+    : taskCount( taskCount_ ), firstTask( firstTask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) );
+      return *this;
+    }
+
+    DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskCount = taskCount_;
+      return *this;
+    }
+
+    DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstTask = firstTask_;
+      return *this;
+    }
+
+
+    operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
+    }
+
+    operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DrawMeshTasksIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( taskCount == rhs.taskCount )
+          && ( firstTask == rhs.firstTask );
+    }
+
+    bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t taskCount = {};
+    uint32_t firstTask = {};
+
+  };
+  static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrmFormatModifierPropertiesEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DrmFormatModifierPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DrmFormatModifierPropertiesEXT const& ) const = default;
+#else
+    bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
+
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrmFormatModifierPropertiesListEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    DrmFormatModifierPropertiesListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_ )
+    : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
+      return *this;
+    }
+
+    DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( DrmFormatModifierPropertiesListEXT ) );
+      return *this;
+    }
+
+
+    operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DrmFormatModifierPropertiesListEXT const& ) const = default;
+#else
+    bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+    void* pNext = {};
+    uint32_t drmFormatModifierCount = {};
+    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {};
+
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
+  {
+    using Type = DrmFormatModifierPropertiesListEXT;
+  };
+
+  struct ExportFenceCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportFenceCreateInfo ) );
+      return *this;
+    }
+
+    ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
+    ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      handleTypes = handleTypes_;
       return *this;
     }
 
-    RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
+
+    operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      attachmentCount = attachmentCount_;
-      return *this;
+      return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
     }
 
-    RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
+    operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      pAttachments = pAttachments_;
-      return *this;
+      return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
     }
 
-    RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
-    {
-      subpassCount = subpassCount_;
-      return *this;
-    }
 
-    RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
-    {
-      pSubpasses = pSubpasses_;
-      return *this;
-    }
-
-    RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
-    {
-      dependencyCount = dependencyCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
-    {
-      pDependencies = pDependencies_;
-      return *this;
-    }
-
-    operator VkRenderPassCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
-    }
-
-    operator VkRenderPassCreateInfo &()
-    {
-      return *reinterpret_cast<VkRenderPassCreateInfo*>(this);
-    }
-
-    bool operator==( RenderPassCreateInfo const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportFenceCreateInfo const& ) const = default;
+#else
+    bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pSubpasses == rhs.pSubpasses )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pDependencies == rhs.pDependencies );
+          && ( handleTypes == rhs.handleTypes );
     }
 
-    bool operator!=( RenderPassCreateInfo const& rhs ) const
+    bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eRenderPassCreateInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    RenderPassCreateFlags flags;
-    uint32_t attachmentCount;
-    const AttachmentDescription* pAttachments;
-    uint32_t subpassCount;
-    const SubpassDescription* pSubpasses;
-    uint32_t dependencyCount;
-    const SubpassDependency* pDependencies;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
+
   };
-  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
-  struct SubpassDescription2KHR
+  template <>
+  struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
   {
-    SubpassDescription2KHR( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(),
-                            PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics,
-                            uint32_t viewMask_ = 0,
-                            uint32_t inputAttachmentCount_ = 0,
-                            const AttachmentReference2KHR* pInputAttachments_ = nullptr,
-                            uint32_t colorAttachmentCount_ = 0,
-                            const AttachmentReference2KHR* pColorAttachments_ = nullptr,
-                            const AttachmentReference2KHR* pResolveAttachments_ = nullptr,
-                            const AttachmentReference2KHR* pDepthStencilAttachment_ = nullptr,
-                            uint32_t preserveAttachmentCount_ = 0,
-                            const uint32_t* pPreserveAttachments_ = nullptr )
-      : flags( flags_ )
-      , pipelineBindPoint( pipelineBindPoint_ )
-      , viewMask( viewMask_ )
-      , inputAttachmentCount( inputAttachmentCount_ )
-      , pInputAttachments( pInputAttachments_ )
-      , colorAttachmentCount( colorAttachmentCount_ )
-      , pColorAttachments( pColorAttachments_ )
-      , pResolveAttachments( pResolveAttachments_ )
-      , pDepthStencilAttachment( pDepthStencilAttachment_ )
-      , preserveAttachmentCount( preserveAttachmentCount_ )
-      , pPreserveAttachments( pPreserveAttachments_ )
-    {
-    }
+    using Type = ExportFenceCreateInfo;
+  };
+  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
 
-    SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubpassDescription2KHR ) );
-    }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportFenceWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;
 
-    SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+    : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SubpassDescription2KHR ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
       return *this;
     }
-    SubpassDescription2KHR& setPNext( const void* pNext_ )
+
+    ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    SubpassDescription2KHR& setFlags( SubpassDescriptionFlags flags_ )
+    ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      pAttributes = pAttributes_;
       return *this;
     }
 
-    SubpassDescription2KHR& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+    ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
     {
-      pipelineBindPoint = pipelineBindPoint_;
+      dwAccess = dwAccess_;
       return *this;
     }
 
-    SubpassDescription2KHR& setViewMask( uint32_t viewMask_ )
+    ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
     {
-      viewMask = viewMask_;
+      name = name_;
       return *this;
     }
 
-    SubpassDescription2KHR& setInputAttachmentCount( uint32_t inputAttachmentCount_ )
+
+    operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      inputAttachmentCount = inputAttachmentCount_;
-      return *this;
+      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
     }
 
-    SubpassDescription2KHR& setPInputAttachments( const AttachmentReference2KHR* pInputAttachments_ )
+    operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      pInputAttachments = pInputAttachments_;
-      return *this;
+      return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
     }
 
-    SubpassDescription2KHR& setColorAttachmentCount( uint32_t colorAttachmentCount_ )
-    {
-      colorAttachmentCount = colorAttachmentCount_;
-      return *this;
-    }
 
-    SubpassDescription2KHR& setPColorAttachments( const AttachmentReference2KHR* pColorAttachments_ )
-    {
-      pColorAttachments = pColorAttachments_;
-      return *this;
-    }
-
-    SubpassDescription2KHR& setPResolveAttachments( const AttachmentReference2KHR* pResolveAttachments_ )
-    {
-      pResolveAttachments = pResolveAttachments_;
-      return *this;
-    }
-
-    SubpassDescription2KHR& setPDepthStencilAttachment( const AttachmentReference2KHR* pDepthStencilAttachment_ )
-    {
-      pDepthStencilAttachment = pDepthStencilAttachment_;
-      return *this;
-    }
-
-    SubpassDescription2KHR& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ )
-    {
-      preserveAttachmentCount = preserveAttachmentCount_;
-      return *this;
-    }
-
-    SubpassDescription2KHR& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ )
-    {
-      pPreserveAttachments = pPreserveAttachments_;
-      return *this;
-    }
-
-    operator VkSubpassDescription2KHR const&() const
-    {
-      return *reinterpret_cast<const VkSubpassDescription2KHR*>(this);
-    }
-
-    operator VkSubpassDescription2KHR &()
-    {
-      return *reinterpret_cast<VkSubpassDescription2KHR*>(this);
-    }
-
-    bool operator==( SubpassDescription2KHR const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportFenceWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pipelineBindPoint == rhs.pipelineBindPoint )
-          && ( viewMask == rhs.viewMask )
-          && ( inputAttachmentCount == rhs.inputAttachmentCount )
-          && ( pInputAttachments == rhs.pInputAttachments )
-          && ( colorAttachmentCount == rhs.colorAttachmentCount )
-          && ( pColorAttachments == rhs.pColorAttachments )
-          && ( pResolveAttachments == rhs.pResolveAttachments )
-          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
-          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
-          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
     }
 
-    bool operator!=( SubpassDescription2KHR const& rhs ) const
+    bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eSubpassDescription2KHR;
+
 
   public:
-    const void* pNext = nullptr;
-    SubpassDescriptionFlags flags;
-    PipelineBindPoint pipelineBindPoint;
-    uint32_t viewMask;
-    uint32_t inputAttachmentCount;
-    const AttachmentReference2KHR* pInputAttachments;
-    uint32_t colorAttachmentCount;
-    const AttachmentReference2KHR* pColorAttachments;
-    const AttachmentReference2KHR* pResolveAttachments;
-    const AttachmentReference2KHR* pDepthStencilAttachment;
-    uint32_t preserveAttachmentCount;
-    const uint32_t* pPreserveAttachments;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
+    const void* pNext = {};
+    const SECURITY_ATTRIBUTES* pAttributes = {};
+    DWORD dwAccess = {};
+    LPCWSTR name = {};
+
   };
-  static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
-  struct RenderPassCreateInfo2KHR
+  template <>
+  struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
   {
-    RenderPassCreateInfo2KHR( RenderPassCreateFlags flags_ = RenderPassCreateFlags(),
-                              uint32_t attachmentCount_ = 0,
-                              const AttachmentDescription2KHR* pAttachments_ = nullptr,
-                              uint32_t subpassCount_ = 0,
-                              const SubpassDescription2KHR* pSubpasses_ = nullptr,
-                              uint32_t dependencyCount_ = 0,
-                              const SubpassDependency2KHR* pDependencies_ = nullptr,
-                              uint32_t correlatedViewMaskCount_ = 0,
-                              const uint32_t* pCorrelatedViewMasks_ = nullptr )
-      : flags( flags_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
-      , subpassCount( subpassCount_ )
-      , pSubpasses( pSubpasses_ )
-      , dependencyCount( dependencyCount_ )
-      , pDependencies( pDependencies_ )
-      , correlatedViewMaskCount( correlatedViewMaskCount_ )
-      , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
-    {
-    }
+    using Type = ExportFenceWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-    RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RenderPassCreateInfo2KHR ) );
-    }
+  struct ExportMemoryAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
 
-    RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( RenderPassCreateInfo2KHR ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
       return *this;
     }
-    RenderPassCreateInfo2KHR& setPNext( const void* pNext_ )
+
+    ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryAllocateInfo ) );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setFlags( RenderPassCreateFlags flags_ )
+    ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+
+    operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
+    }
+
+    operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportMemoryAllocateInfo const& ) const = default;
+#else
+    bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
+  {
+    using Type = ExportMemoryAllocateInfo;
+  };
+  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+
+  struct ExportMemoryAllocateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+
+    operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
+    }
+
+    operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportMemoryAllocateInfoNV const& ) const = default;
+#else
+    bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
+  {
+    using Type = ExportMemoryAllocateInfoNV;
+  };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportMemoryWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+    : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+
+    operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportMemoryWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
+    const void* pNext = {};
+    const SECURITY_ATTRIBUTES* pAttributes = {};
+    DWORD dwAccess = {};
+    LPCWSTR name = {};
+
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
+  {
+    using Type = ExportMemoryWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportMemoryWin32HandleInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}) VULKAN_HPP_NOEXCEPT
+    : pAttributes( pAttributes_ ), dwAccess( dwAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+
+    operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportMemoryWin32HandleInfoNV const& ) const = default;
+#else
+    bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
+    const void* pNext = {};
+    const SECURITY_ATTRIBUTES* pAttributes = {};
+    DWORD dwAccess = {};
+
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
+  {
+    using Type = ExportMemoryWin32HandleInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
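+
+  // Illustrative usage sketch (editor's note, not part of the generated header): an
+  // ExportMemoryWin32HandleInfoNV is chained into a MemoryAllocateInfo when allocating memory
+  // that will be exported as a Win32 handle via VK_NV_external_memory_win32. The access mask,
+  // handle type, and allocation parameters below are placeholders for the example.
+  //
+  //   SECURITY_ATTRIBUTES attributes = { sizeof( SECURITY_ATTRIBUTES ), nullptr, TRUE };
+  //   vk::ExportMemoryWin32HandleInfoNV exportWin32Info = vk::ExportMemoryWin32HandleInfoNV()
+  //     .setPAttributes( &attributes )
+  //     .setDwAccess( GENERIC_ALL );
+  //   vk::ExportMemoryAllocateInfoNV exportInfo( vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );
+  //   exportInfo.setPNext( &exportWin32Info );
+  //   vk::MemoryAllocateInfo allocateInfo( allocationSize, memoryTypeIndex );
+  //   allocateInfo.setPNext( &exportInfo );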
+
+  struct ExportSemaphoreCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportSemaphoreCreateInfo ) );
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+
+    operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
+    }
+
+    operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportSemaphoreCreateInfo const& ) const = default;
+#else
+    bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
+
+  };
+  static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
+  {
+    using Type = ExportSemaphoreCreateInfo;
+  };
+  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
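+
+  // Illustrative usage sketch (editor's note, not part of the generated header): creating a
+  // semaphore whose payload can later be exported, assuming `device` is a valid vk::Device and
+  // the implementation supports opaque-fd semaphore handles.
+  //
+  //   vk::ExportSemaphoreCreateInfo exportInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
+  //   vk::SemaphoreCreateInfo semaphoreInfo;
+  //   semaphoreInfo.setPNext( &exportInfo );
+  //   vk::Semaphore semaphore = device.createSemaphore( semaphoreInfo );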
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportSemaphoreWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+    : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+
+    operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+    const void* pNext = {};
+    const SECURITY_ATTRIBUTES* pAttributes = {};
+    DWORD dwAccess = {};
+    LPCWSTR name = {};
+
+  };
+  static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
+  {
+    using Type = ExportSemaphoreWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct ExternalFormatANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}) VULKAN_HPP_NOEXCEPT
+    : externalFormat( externalFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
+      return *this;
+    }
+
+    ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalFormatANDROID ) );
+      return *this;
+    }
+
+    ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalFormat = externalFormat_;
+      return *this;
+    }
+
+
+    operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
+    }
+
+    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFormatANDROID*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalFormatANDROID const& ) const = default;
+#else
+    bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalFormat == rhs.externalFormat );
+    }
+
+    bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
+    void* pNext = {};
+    uint64_t externalFormat = {};
+
+  };
+  static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFormatANDROID>
+  {
+    using Type = ExternalFormatANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
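+
+  // Illustrative usage sketch (editor's note, not part of the generated header): when importing
+  // an AHardwareBuffer whose format has no Vulkan equivalent, the implementation-defined value
+  // reported by the driver is passed back through an ExternalFormatANDROID chained into the image
+  // create info, with the format itself left as eUndefined. Here `formatProperties` is assumed to
+  // be a vk::AndroidHardwareBufferFormatPropertiesANDROID obtained from the buffer query, and
+  // `imageCreateInfo` an otherwise complete vk::ImageCreateInfo.
+  //
+  //   vk::ExternalFormatANDROID externalFormat;
+  //   externalFormat.setExternalFormat( formatProperties.externalFormat );
+  //   imageCreateInfo.setFormat( vk::Format::eUndefined ).setPNext( &externalFormat );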
+
+  struct ExternalImageFormatProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
+    : externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalImageFormatProperties ) );
+      return *this;
+    }
+
+
+    operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
+    }
+
+    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalImageFormatProperties const& ) const = default;
+#else
+    bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+
+  };
+  static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
+  {
+    using Type = ExternalImageFormatProperties;
+  };
+  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
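+
+  // Illustrative usage sketch (editor's note, not part of the generated header):
+  // ExternalImageFormatProperties is a returned structure; it is chained into
+  // ImageFormatProperties2 before querying the physical device, assuming `physicalDevice` is a
+  // valid vk::PhysicalDevice and `formatInfo` is a filled-in vk::PhysicalDeviceImageFormatInfo2
+  // with a PhysicalDeviceExternalImageFormatInfo chained in.
+  //
+  //   vk::ExternalImageFormatProperties externalProperties;
+  //   vk::ImageFormatProperties2 properties;
+  //   properties.pNext = &externalProperties;
+  //   vk::Result result = physicalDevice.getImageFormatProperties2( &formatInfo, &properties );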
+
+  struct ExternalMemoryBufferCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryBufferCreateInfo ) );
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+
+    operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryBufferCreateInfo const& ) const = default;
+#else
+    bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
+  };
+  static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
+  {
+    using Type = ExternalMemoryBufferCreateInfo;
+  };
+  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+
+  struct ExternalMemoryImageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryImageCreateInfo ) );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+
+    operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryImageCreateInfo const& ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
+  {
+    using Type = ExternalMemoryImageCreateInfo;
+  };
+  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
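+
+  // Illustrative usage sketch (editor's note, not part of the generated header): images that will
+  // be bound to exported or imported memory declare the handle types up front by chaining an
+  // ExternalMemoryImageCreateInfo into the ImageCreateInfo (assuming `imageCreateInfo` is an
+  // otherwise complete vk::ImageCreateInfo).
+  //
+  //   vk::ExternalMemoryImageCreateInfo externalMemoryInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   imageCreateInfo.setPNext( &externalMemoryInfo );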
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+
+    operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryImageCreateInfoNV const& ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
+  {
+    using Type = ExternalMemoryImageCreateInfoNV;
+  };
+
+  struct FilterCubicImageViewImageFormatPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}) VULKAN_HPP_NOEXCEPT
+    : filterCubic( filterCubic_ ), filterCubicMinmax( filterCubicMinmax_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const& ) const = default;
+#else
+    bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterCubic == rhs.filterCubic )
+          && ( filterCubicMinmax == rhs.filterCubicMinmax );
+    }
+
+    bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
+
+  };
+  static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
+  {
+    using Type = FilterCubicImageViewImageFormatPropertiesEXT;
+  };
+
+  struct FragmentShadingRateAttachmentInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(const VULKAN_HPP_NAMESPACE::AttachmentReference2* pFragmentShadingRateAttachment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ), shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FragmentShadingRateAttachmentInfoKHR ) );
+      return *this;
+    }
+
+    FragmentShadingRateAttachmentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
+      return *this;
+    }
+
+    FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
+      return *this;
+    }
+
+
+    operator VkFragmentShadingRateAttachmentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR*>( this );
+    }
+
+    operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FragmentShadingRateAttachmentInfoKHR const& ) const = default;
+#else
+    bool operator==( FragmentShadingRateAttachmentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment )
+          && ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
+    }
+
+    bool operator!=( FragmentShadingRateAttachmentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
+    const void* pNext = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pFragmentShadingRateAttachment = {};
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
+
+  };
+  static_assert( sizeof( FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
+  {
+    using Type = FragmentShadingRateAttachmentInfoKHR;
+  };
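+
+  // Illustrative usage sketch (editor's note, not part of the generated header): a fragment
+  // shading rate attachment is attached to a subpass by chaining this structure into a
+  // SubpassDescription2; the attachment index, layout, and 16x16 texel size below are
+  // assumptions for the example, and `subpassDescription` is an existing vk::SubpassDescription2.
+  //
+  //   vk::AttachmentReference2 shadingRateReference( 1, vk::ImageLayout::eFragmentShadingRateAttachmentOptimalKHR );
+  //   vk::FragmentShadingRateAttachmentInfoKHR shadingRateInfo( &shadingRateReference, { 16, 16 } );
+  //   subpassDescription.setPNext( &shadingRateInfo );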
+
+  struct FramebufferAttachmentImageInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
+    : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferAttachmentImageInfo ) );
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setAttachmentCount( uint32_t attachmentCount_ )
+    FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
     {
-      attachmentCount = attachmentCount_;
+      usage = usage_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setPAttachments( const AttachmentDescription2KHR* pAttachments_ )
+    FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
     {
-      pAttachments = pAttachments_;
+      width = width_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setSubpassCount( uint32_t subpassCount_ )
+    FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
     {
-      subpassCount = subpassCount_;
+      height = height_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setPSubpasses( const SubpassDescription2KHR* pSubpasses_ )
+    FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      pSubpasses = pSubpasses_;
+      layerCount = layerCount_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setDependencyCount( uint32_t dependencyCount_ )
+    FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      dependencyCount = dependencyCount_;
+      viewFormatCount = viewFormatCount_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setPDependencies( const SubpassDependency2KHR* pDependencies_ )
+    FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
     {
-      pDependencies = pDependencies_;
+      pViewFormats = pViewFormats_;
       return *this;
     }
 
-    RenderPassCreateInfo2KHR& setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    FramebufferAttachmentImageInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
     {
-      correlatedViewMaskCount = correlatedViewMaskCount_;
+      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+      pViewFormats = viewFormats_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    RenderPassCreateInfo2KHR& setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ )
+
+    operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT
     {
-      pCorrelatedViewMasks = pCorrelatedViewMasks_;
-      return *this;
+      return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
     }
 
-    operator VkRenderPassCreateInfo2KHR const&() const
+    operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRenderPassCreateInfo2KHR*>(this);
+      return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
     }
 
-    operator VkRenderPassCreateInfo2KHR &()
-    {
-      return *reinterpret_cast<VkRenderPassCreateInfo2KHR*>(this);
-    }
 
-    bool operator==( RenderPassCreateInfo2KHR const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferAttachmentImageInfo const& ) const = default;
+#else
+    bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( flags == rhs.flags )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pSubpasses == rhs.pSubpasses )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pDependencies == rhs.pDependencies )
-          && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
-          && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
+          && ( usage == rhs.usage )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layerCount == rhs.layerCount )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
     }
 
-    bool operator!=( RenderPassCreateInfo2KHR const& rhs ) const
+    bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eRenderPassCreateInfo2KHR;
+
 
   public:
-    const void* pNext = nullptr;
-    RenderPassCreateFlags flags;
-    uint32_t attachmentCount;
-    const AttachmentDescription2KHR* pAttachments;
-    uint32_t subpassCount;
-    const SubpassDescription2KHR* pSubpasses;
-    uint32_t dependencyCount;
-    const SubpassDependency2KHR* pDependencies;
-    uint32_t correlatedViewMaskCount;
-    const uint32_t* pCorrelatedViewMasks;
-  };
-  static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "struct and wrapper have different size!" );
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t layerCount = {};
+    uint32_t viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
 
-  enum class PointClippingBehavior
+  };
+  static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
   {
-    eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
-    eAllClipPlanesKHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
-    eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
-    eUserClipPlanesOnlyKHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
+    using Type = FramebufferAttachmentImageInfo;
+  };
+  using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
+
+  struct FramebufferAttachmentsCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {}) VULKAN_HPP_NOEXCEPT
+    : attachmentImageInfoCount( attachmentImageInfoCount_ ), pAttachmentImageInfos( pAttachmentImageInfos_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ )
+    : attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) ), pAttachmentImageInfos( attachmentImageInfos_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( FramebufferAttachmentsCreateInfo ) );
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = attachmentImageInfoCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentImageInfos = pAttachmentImageInfos_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
+      pAttachmentImageInfos = attachmentImageInfos_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
+    }
+
+    operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferAttachmentsCreateInfo const& ) const = default;
+#else
+    bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
+          && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
+    }
+
+    bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
+    const void* pNext = {};
+    uint32_t attachmentImageInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {};
+
+  };
+  static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
+  {
+    using Type = FramebufferAttachmentsCreateInfo;
+  };
+  using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
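+
+  // Illustrative usage sketch (editor's note, not part of the generated header): for an imageless
+  // framebuffer the per-attachment image parameters are supplied through
+  // FramebufferAttachmentsCreateInfo instead of actual image views; the single color-attachment
+  // description and the names `width`, `height`, `colorFormat`, and `framebufferCreateInfo` are
+  // assumptions for the example.
+  //
+  //   vk::FramebufferAttachmentImageInfo attachmentImageInfo(
+  //     {}, vk::ImageUsageFlagBits::eColorAttachment, width, height, 1, 1, &colorFormat );
+  //   vk::FramebufferAttachmentsCreateInfo attachmentsInfo( 1, &attachmentImageInfo );
+  //   framebufferCreateInfo.setFlags( vk::FramebufferCreateFlagBits::eImageless )
+  //                        .setPNext( &attachmentsInfo );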
+
+  struct GraphicsShaderGroupCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}) VULKAN_HPP_NOEXCEPT
+    : stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {} )
+    : stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GraphicsShaderGroupCreateInfoNV ) );
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GraphicsShaderGroupCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages = stages_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+
+
+    operator VkGraphicsShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV*>( this );
+    }
+
+    operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GraphicsShaderGroupCreateInfoNV const& ) const = default;
+#else
+    bool operator==( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( pVertexInputState == rhs.pVertexInputState )
+          && ( pTessellationState == rhs.pTessellationState );
+    }
+
+    bool operator!=( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+    const void* pNext = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
+
+  };
+  static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
+  {
+    using Type = GraphicsShaderGroupCreateInfoNV;
+  };
+
+  struct GraphicsPipelineShaderGroupsCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ = {}) VULKAN_HPP_NOEXCEPT
+    : groupCount( groupCount_ ), pGroups( pGroups_ ), pipelineCount( pipelineCount_ ), pPipelines( pPipelines_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {} )
+    : groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), pipelineCount( static_cast<uint32_t>( pipelines_.size() ) ), pPipelines( pipelines_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) );
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = static_cast<uint32_t>( groups_.size() );
+      pGroups = groups_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCount = pipelineCount_;
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelines = pPipelines_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCount = static_cast<uint32_t>( pipelines_.size() );
+      pPipelines = pipelines_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkGraphicsPipelineShaderGroupsCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
+    }
+
+    operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const& ) const = default;
+#else
+    bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( pipelineCount == rhs.pipelineCount )
+          && ( pPipelines == rhs.pPipelines );
+    }
+
+    bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+    const void* pNext = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups = {};
+    uint32_t pipelineCount = {};
+    const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines = {};
+
+  };
+  static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsPipelineShaderGroupsCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
+  };
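+
+  // Illustrative usage sketch (editor's note, not part of the generated header): with
+  // VK_NV_device_generated_commands, additional shader groups are attached to a graphics pipeline
+  // by chaining this structure into the GraphicsPipelineCreateInfo; the single group built from an
+  // assumed `stages` vector of vk::PipelineShaderStageCreateInfo is a placeholder for the example.
+  //
+  //   vk::GraphicsShaderGroupCreateInfoNV group( static_cast<uint32_t>( stages.size() ), stages.data() );
+  //   vk::GraphicsPipelineShaderGroupsCreateInfoNV groupsInfo( 1, &group );
+  //   graphicsPipelineCreateInfo.setPNext( &groupsInfo );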
+
+  struct HeadlessSurfaceCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( HeadlessSurfaceCreateInfoEXT ) );
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+
+    operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( HeadlessSurfaceCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
+
+  };
+  static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
+  {
+    using Type = HeadlessSurfaceCreateInfoEXT;
+  };
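+
+  // Illustrative usage sketch (not from the generated header; assumes the default
+  // "vk" alias for VULKAN_HPP_NAMESPACE): sType is pre-set by the wrapper and the
+  // chained setters fill in the remaining members, e.g.
+  //   vk::HeadlessSurfaceCreateInfoEXT headlessInfo;
+  //   headlessInfo.setFlags( {} ).setPNext( nullptr );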
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  struct IOSSurfaceCreateInfoMVK
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pView( pView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+
+
+    operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IOSSurfaceCreateInfoMVK const& ) const = default;
+#else
+    bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+    }
+
+    bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
+    const void* pView = {};
+
+  };
+  static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
+  {
+    using Type = IOSSurfaceCreateInfoMVK;
+  };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  struct ImageDrmFormatModifierExplicitCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), pPlaneLayouts( pPlaneLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ )
+    : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) ), pPlaneLayouts( planeLayouts_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) );
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPlaneLayouts = pPlaneLayouts_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
+      pPlaneLayouts = planeLayouts_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( pPlaneLayouts == rhs.pPlaneLayouts );
+    }
+
+    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+    const void* pNext = {};
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {};
+
+  };
+  static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
+  {
+    using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
+  };
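+
+  // Illustrative sketch (not part of the generated header; assumes
+  // VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined): the ArrayProxyNoTemporaries
+  // setter derives drmFormatModifierPlaneCount and pPlaneLayouts from one range, e.g.
+  //   std::array<vk::SubresourceLayout, 2> planeLayouts{};
+  //   vk::ImageDrmFormatModifierExplicitCreateInfoEXT drmInfo;
+  //   drmInfo.setDrmFormatModifier( modifier )   // modifier: a uint64_t obtained elsewhere
+  //          .setPlaneLayouts( planeLayouts );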
+
+  struct ImageDrmFormatModifierListCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t* pDrmFormatModifiers_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifiers( pDrmFormatModifiers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ )
+    : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) );
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierCount = drmFormatModifierCount_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDrmFormatModifiers = pDrmFormatModifiers_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
+      pDrmFormatModifiers = drmFormatModifiers_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
+    }
+
+    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+    const void* pNext = {};
+    uint32_t drmFormatModifierCount = {};
+    const uint64_t* pDrmFormatModifiers = {};
+
+  };
+  static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
+  {
+    using Type = ImageDrmFormatModifierListCreateInfoEXT;
+  };
+
+  struct ImageFormatListCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
+    : viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
+    : viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageFormatListCreateInfo ) );
+      return *this;
+    }
+
+    ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ImageFormatListCreateInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+      pViewFormats = viewFormats_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
+    }
+
+    operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageFormatListCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
+    }
+
+    bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
+    const void* pNext = {};
+    uint32_t viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
+
+  };
+  static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
+  {
+    using Type = ImageFormatListCreateInfo;
+  };
+  using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  struct ImagePipeSurfaceCreateInfoFUCHSIA
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), imagePipeHandle( imagePipeHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) );
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagePipeHandle = imagePipeHandle_;
+      return *this;
+    }
+
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const& ) const = default;
+#else
+    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
+    }
+
+    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
+    zx_handle_t imagePipeHandle = {};
+
+  };
+  static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
+  {
+    using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct ImagePlaneMemoryRequirementsInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
+    : planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) );
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+
+
+    operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImagePlaneMemoryRequirementsInfo const& ) const = default;
+#else
+    bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( planeAspect == rhs.planeAspect );
+    }
+
+    bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+
+  };
+  static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
+  {
+    using Type = ImagePlaneMemoryRequirementsInfo;
+  };
+  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
+
+  struct ImageStencilUsageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}) VULKAN_HPP_NOEXCEPT
+    : stencilUsage( stencilUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageStencilUsageCreateInfo ) );
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilUsage = stencilUsage_;
+      return *this;
+    }
+
+
+    operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageStencilUsageCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilUsage == rhs.stencilUsage );
+    }
+
+    bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
+
+  };
+  static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
+  {
+    using Type = ImageStencilUsageCreateInfo;
+  };
+  using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
+
+  struct ImageSwapchainCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}) VULKAN_HPP_NOEXCEPT
+    : swapchain( swapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageSwapchainCreateInfoKHR ) );
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+
+    operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSwapchainCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain );
+    }
+
+    bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+
+  };
+  static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
+  {
+    using Type = ImageSwapchainCreateInfoKHR;
+  };
+
+  struct ImageViewASTCDecodeModeEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
+    : decodeMode( decodeMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewASTCDecodeModeEXT ) );
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeMode = decodeMode_;
+      return *this;
+    }
+
+
+    operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewASTCDecodeModeEXT const& ) const = default;
+#else
+    bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeMode == rhs.decodeMode );
+    }
+
+    bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+  static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
+  {
+    using Type = ImageViewASTCDecodeModeEXT;
+  };
+
+  struct ImageViewUsageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}) VULKAN_HPP_NOEXCEPT
+    : usage( usage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImageViewUsageCreateInfo ) );
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+
+    operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
+    }
+
+    operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewUsageCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( usage == rhs.usage );
+    }
+
+    bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+
+  };
+  static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
+  {
+    using Type = ImageViewUsageCreateInfo;
+  };
+  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct ImportAndroidHardwareBufferInfoANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer* buffer_ = {}) VULKAN_HPP_NOEXCEPT
+    : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
+      return *this;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) );
+      return *this;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+
+    operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const& ) const = default;
+#else
+    bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+    const void* pNext = {};
+    struct AHardwareBuffer* buffer = {};
+
+  };
+  static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
+  {
+    using Type = ImportAndroidHardwareBufferInfoANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ImportMemoryFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleType( handleType_ ), fd( fd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryFdInfoKHR ) );
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+
+
+    operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
+    }
+
+    operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportMemoryFdInfoKHR const& ) const = default;
+#else
+    bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
+
+  };
+  static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
+  {
+    using Type = ImportMemoryFdInfoKHR;
+  };
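+
+  // Illustrative sketch (not from the generated header): handleType already defaults
+  // to eOpaqueFd, so typically only the file descriptor needs to be set before the
+  // struct is chained into a memory allocation's pNext chain, e.g.
+  //   vk::ImportMemoryFdInfoKHR importInfo;
+  //   importInfo.setFd( fd );  // fd: POSIX file descriptor obtained elsewhere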
+
+  struct ImportMemoryHostPointerInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void* pHostPointer_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleType( handleType_ ), pHostPointer( pHostPointer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pHostPointer = pHostPointer_;
+      return *this;
+    }
+
+
+    operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
+    }
+
+    operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportMemoryHostPointerInfoEXT const& ) const = default;
+#else
+    bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( pHostPointer == rhs.pHostPointer );
+    }
+
+    bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    void* pHostPointer = {};
+
+  };
+  static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
+  {
+    using Type = ImportMemoryHostPointerInfoEXT;
+  };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportMemoryWin32HandleInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleType( handleType_ ), handle( handle_ ), name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+
+    operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportMemoryWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
+
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
+  {
+    using Type = ImportMemoryWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportMemoryWin32HandleInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}) VULKAN_HPP_NOEXCEPT
+    : handleType( handleType_ ), handle( handle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+
+    operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImportMemoryWin32HandleInfoNV const& ) const = default;
+#else
+    bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
+    HANDLE handle = {};
+
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
+  {
+    using Type = ImportMemoryWin32HandleInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct InputAttachmentAspectReference
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : subpass( subpass_ ), inputAttachmentIndex( inputAttachmentIndex_ ), aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
+      return *this;
+    }
+
+    InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( InputAttachmentAspectReference ) );
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentIndex = inputAttachmentIndex_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+
+    operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
+    }
+
+    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( InputAttachmentAspectReference const& ) const = default;
+#else
+    bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpass == rhs.subpass )
+          && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
+          && ( aspectMask == rhs.aspectMask );
+    }
+
+    bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t subpass = {};
+    uint32_t inputAttachmentIndex = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+
+  };
+  static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+
+  struct InstanceCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {} )
+    : flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( InstanceCreateInfo ) );
+      return *this;
+    }
+
+    InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pApplicationInfo = pApplicationInfo_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    InstanceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+      ppEnabledLayerNames = pEnabledLayerNames_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    InstanceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
+    }
+
+    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInstanceCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( InstanceCreateInfo const& ) const = default;
+#else
+    bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pApplicationInfo == rhs.pApplicationInfo )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+    }
+
+    bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
+    const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {};
+    uint32_t enabledLayerCount = {};
+    const char* const * ppEnabledLayerNames = {};
+    uint32_t enabledExtensionCount = {};
+    const char* const * ppEnabledExtensionNames = {};
+
+  };
+  static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
+  {
+    using Type = InstanceCreateInfo;
+  };
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  struct MacOSSurfaceCreateInfoMVK
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pView( pView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+
+
+    operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MacOSSurfaceCreateInfoMVK const& ) const = default;
+#else
+    bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+    }
+
+    bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
+    const void* pView = {};
+
+  };
+  static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
+  {
+    using Type = MacOSSurfaceCreateInfoMVK;
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  struct MemoryAllocateFlagsInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryAllocateFlagsInfo ) );
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+
+    operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryAllocateFlagsInfo const& ) const = default;
+#else
+    bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
+    uint32_t deviceMask = {};
+
+  };
+  static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
+  {
+    using Type = MemoryAllocateFlagsInfo;
+  };
+  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+
+  struct MemoryDedicatedAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
+    : image( image_ ), buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryDedicatedAllocateInfo ) );
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+
+    operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
+    }
+
+    operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryDedicatedAllocateInfo const& ) const = default;
+#else
+    bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+
+  };
+  static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
+  {
+    using Type = MemoryDedicatedAllocateInfo;
+  };
+  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+
+  struct MemoryDedicatedRequirements
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : prefersDedicatedAllocation( prefersDedicatedAllocation_ ), requiresDedicatedAllocation( requiresDedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
+      return *this;
+    }
+
+    MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryDedicatedRequirements ) );
+      return *this;
+    }
+
+
+    operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
+    }
+
+    operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryDedicatedRequirements const& ) const = default;
+#else
+    bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
+          && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+    }
+
+    bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
+
+  };
+  static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
+  {
+    using Type = MemoryDedicatedRequirements;
+  };
+  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
+
+  struct MemoryOpaqueCaptureAddressAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT
+    : opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) );
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const& ) const = default;
+#else
+    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+    }
+
+    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+    const void* pNext = {};
+    uint64_t opaqueCaptureAddress = {};
+
+  };
+  static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
+  {
+    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
+  };
+  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
+
+  struct MemoryPriorityAllocateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(float priority_ = {}) VULKAN_HPP_NOEXCEPT
+    : priority( priority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MemoryPriorityAllocateInfoEXT ) );
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priority = priority_;
+      return *this;
+    }
+
+
+    operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryPriorityAllocateInfoEXT const& ) const = default;
+#else
+    bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( priority == rhs.priority );
+    }
+
+    bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
+    const void* pNext = {};
+    float priority = {};
+
+  };
+  static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
+  {
+    using Type = MemoryPriorityAllocateInfoEXT;
+  };
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+  struct MetalSurfaceCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer* pLayer_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), pLayer( pLayer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( MetalSurfaceCreateInfoEXT ) );
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLayer = pLayer_;
+      return *this;
+    }
+
+
+    operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pLayer == rhs.pLayer );
+    }
+
+    bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
+    const CAMetalLayer* pLayer = {};
+
+  };
+  static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
+  {
+    using Type = MetalSurfaceCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  union PerformanceCounterResultKHR
+  {
+    PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+    }
+
+    PerformanceCounterResultKHR( int32_t int32_ = {} )
+      : int32( int32_ )
+    {}
+
+    PerformanceCounterResultKHR( int64_t int64_ )
+      : int64( int64_ )
+    {}
+
+    PerformanceCounterResultKHR( uint32_t uint32_ )
+      : uint32( uint32_ )
+    {}
+
+    PerformanceCounterResultKHR( uint64_t uint64_ )
+      : uint64( uint64_ )
+    {}
+
+    PerformanceCounterResultKHR( float float32_ )
+      : float32( float32_ )
+    {}
+
+    PerformanceCounterResultKHR( double float64_ )
+      : float64( float64_ )
+    {}
+
+    PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int32 = int32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int64 = int64_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint32 = uint32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint64 = uint64_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float32 = float32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float64 = float64_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+      return *this;
+    }
+
+    operator VkPerformanceCounterResultKHR const&() const
+    {
+      return *reinterpret_cast<const VkPerformanceCounterResultKHR*>(this);
+    }
+
+    operator VkPerformanceCounterResultKHR &()
+    {
+      return *reinterpret_cast<VkPerformanceCounterResultKHR*>(this);
+    }
+
+    int32_t int32;
+    int64_t int64;
+    uint32_t uint32;
+    uint64_t uint64;
+    float float32;
+    double float64;
+  };
+
+  struct PerformanceQuerySubmitInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : counterPassIndex( counterPassIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PerformanceQuerySubmitInfoKHR ) );
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterPassIndex = counterPassIndex_;
+      return *this;
+    }
+
+
+    operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceQuerySubmitInfoKHR const& ) const = default;
+#else
+    bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( counterPassIndex == rhs.counterPassIndex );
+    }
+
+    bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+    const void* pNext = {};
+    uint32_t counterPassIndex = {};
+
+  };
+  static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
+  {
+    using Type = PerformanceQuerySubmitInfoKHR;
+  };
+
+  struct PhysicalDevice16BitStorageFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}) VULKAN_HPP_NOEXCEPT
+    : storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) );
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+          && ( storagePushConstant16 == rhs.storagePushConstant16 )
+          && ( storageInputOutput16 == rhs.storageInputOutput16 );
+    }
+
+    bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+
+  };
+  static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
+  {
+    using Type = PhysicalDevice16BitStorageFeatures;
+  };
+  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
+
+  struct PhysicalDevice4444FormatsFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}) VULKAN_HPP_NOEXCEPT
+    : formatA4R4G4B4( formatA4R4G4B4_ ), formatA4B4G4R4( formatA4B4G4R4_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevice4444FormatsFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDevice4444FormatsFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatA4R4G4B4 = formatA4R4G4B4_;
+      return *this;
+    }
+
+    PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatA4B4G4R4 = formatA4B4G4R4_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevice4444FormatsFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 )
+          && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
+    }
+
+    bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
+
+  };
+  static_assert( sizeof( PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice4444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
+  {
+    using Type = PhysicalDevice4444FormatsFeaturesEXT;
+  };
+
+  struct PhysicalDevice8BitStorageFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}) VULKAN_HPP_NOEXCEPT
+    : storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevice8BitStorageFeatures ) );
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevice8BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
+    }
+
+    operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice8BitStorageFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
+          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
+          && ( storagePushConstant8 == rhs.storagePushConstant8 );
+    }
+
+    bool operator!=( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+
+  };
+  static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
+  {
+    using Type = PhysicalDevice8BitStorageFeatures;
+  };
+  using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
+
+  struct PhysicalDeviceASTCDecodeFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}) VULKAN_HPP_NOEXCEPT
+    : decodeModeSharedExponent( decodeModeSharedExponent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeModeSharedExponent = decodeModeSharedExponent_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+    }
+
+    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
+  {
+    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
+  };
+
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}) VULKAN_HPP_NOEXCEPT
+    : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  };
+
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}) VULKAN_HPP_NOEXCEPT
+    : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ), advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ), advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ), advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ), advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ), advancedBlendAllOperations( advancedBlendAllOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
+          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
+          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
+          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
+          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
+          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+    void* pNext = {};
+    uint32_t advancedBlendMaxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
+  {
+    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+  };
+
+  struct PhysicalDeviceBufferDeviceAddressFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT
+    : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
+  {
+    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
+  };
+  using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
+
+  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT
+    : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+  };
+  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+  struct PhysicalDeviceCoherentMemoryFeaturesAMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceCoherentMemory( deviceCoherentMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) );
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceCoherentMemory = deviceCoherentMemory_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+    }
+
+    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
+  {
+    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
+  };
+
+  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}) VULKAN_HPP_NOEXCEPT
+    : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ), computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
+          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
+    }
+
+    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
+  {
+    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+  };
+
+  struct PhysicalDeviceConditionalRenderingFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}) VULKAN_HPP_NOEXCEPT
+    : conditionalRendering( conditionalRendering_ ), inheritedConditionalRendering( inheritedConditionalRendering_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRendering = conditionalRendering_;
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedConditionalRendering = inheritedConditionalRendering_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRendering == rhs.conditionalRendering )
+          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
+    }
+
+    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
+  {
+    using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
+  };
+
+  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}) VULKAN_HPP_NOEXCEPT
+    : primitiveOverestimationSize( primitiveOverestimationSize_ ), maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ), extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ), primitiveUnderestimation( primitiveUnderestimation_ ), conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ), degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ), degenerateLinesRasterized( degenerateLinesRasterized_ ), fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ), conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
+          && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
+          && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
+          && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
+          && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
+          && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
+          && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
+          && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
+          && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
+    }
+
+    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+    void* pNext = {};
+    float primitiveOverestimationSize = {};
+    float maxExtraPrimitiveOverestimationSize = {};
+    float extraPrimitiveOverestimationSizeGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
+  {
+    using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
+  };
+
+  struct PhysicalDeviceCooperativeMatrixFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}) VULKAN_HPP_NOEXCEPT
+    : cooperativeMatrix( cooperativeMatrix_ ), cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrix = cooperativeMatrix_;
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrix == rhs.cooperativeMatrix )
+          && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
+  {
+    using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
+  };
+
+  struct PhysicalDeviceCooperativeMatrixPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}) VULKAN_HPP_NOEXCEPT
+    : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
+  {
+    using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
+  };
+
+  struct PhysicalDeviceCornerSampledImageFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}) VULKAN_HPP_NOEXCEPT
+    : cornerSampledImage( cornerSampledImage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cornerSampledImage = cornerSampledImage_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cornerSampledImage == rhs.cornerSampledImage );
+    }
+
+    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
+  {
+    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
+  };
+
+  struct PhysicalDeviceCoverageReductionModeFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}) VULKAN_HPP_NOEXCEPT
+    : coverageReductionMode( coverageReductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( coverageReductionMode == rhs.coverageReductionMode );
+    }
+
+    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
+  {
+    using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
+  };
+
+  struct PhysicalDeviceCustomBorderColorFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}) VULKAN_HPP_NOEXCEPT
+    : customBorderColors( customBorderColors_ ), customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColors = customBorderColors_;
+      return *this;
+    }
+
+    PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( customBorderColors == rhs.customBorderColors )
+          && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
+    }
+
+    bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
+  {
+    using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
+  };
+
+  struct PhysicalDeviceCustomBorderColorPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
+    }
+
+    bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+    void* pNext = {};
+    uint32_t maxCustomBorderColorSamplers = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
+  {
+    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
+  };
+
+  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}) VULKAN_HPP_NOEXCEPT
+    : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
+    }
+
+    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
+  {
+    using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+  };
+
+  struct PhysicalDeviceDepthClipEnableFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
+    : depthClipEnable( depthClipEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipEnable = depthClipEnable_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthClipEnable == rhs.depthClipEnable );
+    }
+
+    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
+  };
+
+  struct PhysicalDeviceDepthStencilResolveProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}) VULKAN_HPP_NOEXCEPT
+    : supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDepthStencilResolveProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDepthStencilResolveProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
+          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
+          && ( independentResolveNone == rhs.independentResolveNone )
+          && ( independentResolve == rhs.independentResolve );
+    }
+
+    bool operator!=( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
+  {
+    using Type = PhysicalDeviceDepthStencilResolveProperties;
+  };
+  using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
+
+  struct PhysicalDeviceDescriptorIndexingFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDescriptorIndexingFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
+  {
+    using Type = PhysicalDeviceDescriptorIndexingFeatures;
+  };
+  using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
+
+  struct PhysicalDeviceDescriptorIndexingProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDescriptorIndexingProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDescriptorIndexingProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+    void* pNext = {};
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+    uint32_t maxPerStageUpdateAfterBindResources = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
+  {
+    using Type = PhysicalDeviceDescriptorIndexingProperties;
+  };
+  using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
+
+  struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceGeneratedCommands( deviceGeneratedCommands_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceGeneratedCommands = deviceGeneratedCommands_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
+    }
+
+    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
+  {
+    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+  };
+
+  struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ), maxIndirectSequenceCount( maxIndirectSequenceCount_ ), maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ), maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ), maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ), maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ), minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ), minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ), minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount )
+          && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount )
+          && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount )
+          && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount )
+          && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset )
+          && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride )
+          && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment )
+          && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment )
+          && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+    void* pNext = {};
+    uint32_t maxGraphicsShaderGroupCount = {};
+    uint32_t maxIndirectSequenceCount = {};
+    uint32_t maxIndirectCommandsTokenCount = {};
+    uint32_t maxIndirectCommandsStreamCount = {};
+    uint32_t maxIndirectCommandsTokenOffset = {};
+    uint32_t maxIndirectCommandsStreamStride = {};
+    uint32_t minSequencesCountBufferOffsetAlignment = {};
+    uint32_t minSequencesIndexBufferOffsetAlignment = {};
+    uint32_t minIndirectCommandsBufferOffsetAlignment = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
+  {
+    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+  };
+
+  struct PhysicalDeviceDeviceMemoryReportFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceMemoryReport( deviceMemoryReport_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDeviceMemoryReportFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMemoryReport = deviceMemoryReport_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMemoryReport == rhs.deviceMemoryReport );
+    }
+
+    bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
+  };
+
+  struct PhysicalDeviceDiagnosticsConfigFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}) VULKAN_HPP_NOEXCEPT
+    : diagnosticsConfig( diagnosticsConfig_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
+    {
+      diagnosticsConfig = diagnosticsConfig_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( diagnosticsConfig == rhs.diagnosticsConfig );
+    }
+
+    bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
+  {
+    using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
+  };
+
+  struct PhysicalDeviceDiscardRectanglePropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxDiscardRectangles( maxDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
+    }
+
+    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+    void* pNext = {};
+    uint32_t maxDiscardRectangles = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
+  {
+    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
+  };
+
+  struct PhysicalDeviceDriverProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}) VULKAN_HPP_NOEXCEPT
+    : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceDriverProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDriverProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( driverID == rhs.driverID )
+          && ( driverName == rhs.driverName )
+          && ( driverInfo == rhs.driverInfo )
+          && ( conformanceVersion == rhs.conformanceVersion );
+    }
+
+    bool operator!=( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
+  {
+    using Type = PhysicalDeviceDriverProperties;
+  };
+  using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
+
+  struct PhysicalDeviceExclusiveScissorFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}) VULKAN_HPP_NOEXCEPT
+    : exclusiveScissor( exclusiveScissor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissor = exclusiveScissor_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissor == rhs.exclusiveScissor );
+    }
+
+    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
+  {
+    using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
+  };
+
+  struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}) VULKAN_HPP_NOEXCEPT
+    : extendedDynamicState( extendedDynamicState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedDynamicState = extendedDynamicState_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( extendedDynamicState == rhs.extendedDynamicState );
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+  };
+
+  struct PhysicalDeviceExternalImageFormatInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
+    : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+
+  };
+  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
+  {
+    using Type = PhysicalDeviceExternalImageFormatInfo;
+  };
+  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+
+  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+    : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
+  {
+    using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
+  };
+
+  struct PhysicalDeviceFloatControlsProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}) VULKAN_HPP_NOEXCEPT
+    : denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFloatControlsProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFloatControlsProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFloatControlsProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
+          && ( roundingModeIndependence == rhs.roundingModeIndependence )
+          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
+          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
+          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
+          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
+          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
+          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
+          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
+          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
+          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
+          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
+          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
+          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
+          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
+          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
+          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
+    }
+
+    bool operator!=( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
+  {
+    using Type = PhysicalDeviceFloatControlsProperties;
+  };
+  using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
+
+  struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}) VULKAN_HPP_NOEXCEPT
+    : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+  };
+
+  struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}) VULKAN_HPP_NOEXCEPT
+    : subsampledLoads( subsampledLoads_ ), subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ), maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ), maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subsampledLoads == rhs.subsampledLoads )
+          && ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess )
+          && ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers )
+          && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {};
+    uint32_t maxSubsampledArrayLayers = {};
+    uint32_t maxDescriptorSetSubsampledSamplers = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
+  };
+
+  struct PhysicalDeviceFragmentDensityMapFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}) VULKAN_HPP_NOEXCEPT
+    : fragmentDensityMap( fragmentDensityMap_ ), fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ), fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMap = fragmentDensityMap_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMap == rhs.fragmentDensityMap )
+          && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
+          && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
+  };
+
+  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}) VULKAN_HPP_NOEXCEPT
+    : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ), maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ), fragmentDensityInvocations( fragmentDensityInvocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
+          && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
+          && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
+  };
+
+  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}) VULKAN_HPP_NOEXCEPT
+    : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderBarycentric = fragmentShaderBarycentric_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV>
+  {
+    using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+  };
+
+  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}) VULKAN_HPP_NOEXCEPT
+    : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ), fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ), fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
+          && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
+          && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
+  {
+    using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+  };
+
+  struct PhysicalDeviceFragmentShadingRateFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ), primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ), attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentShadingRateFeaturesKHR ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate )
+          && ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate )
+          && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShadingRateFeaturesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
+  {
+    using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
+  };
+
+  struct PhysicalDeviceFragmentShadingRatePropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, uint32_t maxFragmentSizeAspectRatio_ = {}, uint32_t maxFragmentShadingRateCoverageSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}) VULKAN_HPP_NOEXCEPT
+    : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ), primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ), layeredShadingRateAttachments( layeredShadingRateAttachments_ ), fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ), maxFragmentSize( maxFragmentSize_ ), maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ), maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ), maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ), fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ), fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ), fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ), fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ), fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ), fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ), fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceFragmentShadingRatePropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize )
+          && ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize )
+          && ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio )
+          && ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports )
+          && ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments )
+          && ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps )
+          && ( maxFragmentSize == rhs.maxFragmentSize )
+          && ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio )
+          && ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples )
+          && ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples )
+          && ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites )
+          && ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask )
+          && ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask )
+          && ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization )
+          && ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock )
+          && ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations )
+          && ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
+    uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {};
+    VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {};
+    uint32_t maxFragmentSizeAspectRatio = {};
+    uint32_t maxFragmentShadingRateCoverageSamples = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShadingRatePropertiesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
+  {
+    using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
+  };
+
+  struct PhysicalDeviceGroupProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : physicalDeviceCount( physicalDeviceCount_ ), physicalDevices( physicalDevices_ ), subsetAllocation( subsetAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceGroupProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( physicalDevices == rhs.physicalDevices )
+          && ( subsetAllocation == rhs.subsetAllocation );
+    }
+
+    bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+    void* pNext = {};
+    uint32_t physicalDeviceCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
+  {
+    using Type = PhysicalDeviceGroupProperties;
+  };
+  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+
+  struct PhysicalDeviceHostQueryResetFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}) VULKAN_HPP_NOEXCEPT
+    : hostQueryReset( hostQueryReset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceHostQueryResetFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceHostQueryResetFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceHostQueryResetFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hostQueryReset == rhs.hostQueryReset );
+    }
+
+    bool operator!=( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
+  {
+    using Type = PhysicalDeviceHostQueryResetFeatures;
+  };
+  using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
+
+  struct PhysicalDeviceIDProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceIDProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceIDProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceUUID == rhs.deviceUUID )
+          && ( driverUUID == rhs.driverUUID )
+          && ( deviceLUID == rhs.deviceLUID )
+          && ( deviceNodeMask == rhs.deviceNodeMask )
+          && ( deviceLUIDValid == rhs.deviceLUIDValid );
+    }
+
+    bool operator!=( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
+    uint32_t deviceNodeMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
+  {
+    using Type = PhysicalDeviceIDProperties;
+  };
+  using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
+
+  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
+    : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
+    : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+    }
+
+    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+    const void* pNext = {};
+    uint64_t drmFormatModifier = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t* pQueueFamilyIndices = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
+  {
+    using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
+  };
+
+  struct PhysicalDeviceImageRobustnessFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}) VULKAN_HPP_NOEXCEPT
+    : robustImageAccess( robustImageAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageRobustnessFeaturesEXT( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceImageRobustnessFeaturesEXT & operator=( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceImageRobustnessFeaturesEXT & operator=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageRobustnessFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageRobustnessFeaturesEXT & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustImageAccess = robustImageAccess_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceImageRobustnessFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImageRobustnessFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustImageAccess == rhs.robustImageAccess );
+    }
+
+    bool operator!=( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) == sizeof( VkPhysicalDeviceImageRobustnessFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageRobustnessFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT>
+  {
+    using Type = PhysicalDeviceImageRobustnessFeaturesEXT;
+  };
+
+  struct PhysicalDeviceImageViewImageFormatInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D) VULKAN_HPP_NOEXCEPT
+    : imageViewType( imageViewType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewType = imageViewType_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageViewType == rhs.imageViewType );
+    }
+
+    bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+
+  };
+  static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
+  {
+    using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
+  };
+
+  struct PhysicalDeviceImagelessFramebufferFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}) VULKAN_HPP_NOEXCEPT
+    : imagelessFramebuffer( imagelessFramebuffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceImagelessFramebufferFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagelessFramebuffer = imagelessFramebuffer_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceImagelessFramebufferFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
+    }
+
+    bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
+  {
+    using Type = PhysicalDeviceImagelessFramebufferFeatures;
+  };
+  using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
+
+  struct PhysicalDeviceIndexTypeUint8FeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}) VULKAN_HPP_NOEXCEPT
+    : indexTypeUint8( indexTypeUint8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeUint8 = indexTypeUint8_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( indexTypeUint8 == rhs.indexTypeUint8 );
+    }
+
+    bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
+  {
+    using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
+  };
+
+  struct PhysicalDeviceInlineUniformBlockFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}) VULKAN_HPP_NOEXCEPT
+    : inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inlineUniformBlock = inlineUniformBlock_;
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( inlineUniformBlock == rhs.inlineUniformBlock )
+          && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT>
+  {
+    using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT;
+  };
+
+  struct PhysicalDeviceInlineUniformBlockPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
+          && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
+          && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
+          && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
+          && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
+    void* pNext = {};
+    uint32_t maxInlineUniformBlockSize = {};
+    uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT>
+  {
+    using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT;
+  };
+
+  struct PhysicalDeviceLineRasterizationFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}) VULKAN_HPP_NOEXCEPT
+    : rectangularLines( rectangularLines_ ), bresenhamLines( bresenhamLines_ ), smoothLines( smoothLines_ ), stippledRectangularLines( stippledRectangularLines_ ), stippledBresenhamLines( stippledBresenhamLines_ ), stippledSmoothLines( stippledSmoothLines_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangularLines = rectangularLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bresenhamLines = bresenhamLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      smoothLines = smoothLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledRectangularLines = stippledRectangularLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledBresenhamLines = stippledBresenhamLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledSmoothLines = stippledSmoothLines_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rectangularLines == rhs.rectangularLines )
+          && ( bresenhamLines == rhs.bresenhamLines )
+          && ( smoothLines == rhs.smoothLines )
+          && ( stippledRectangularLines == rhs.stippledRectangularLines )
+          && ( stippledBresenhamLines == rhs.stippledBresenhamLines )
+          && ( stippledSmoothLines == rhs.stippledSmoothLines );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
+  {
+    using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
+  };
+
+  struct PhysicalDeviceLineRasterizationPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(uint32_t lineSubPixelPrecisionBits_ = {}) VULKAN_HPP_NOEXCEPT
+    : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+    void* pNext = {};
+    uint32_t lineSubPixelPrecisionBits = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
+  {
+    using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
+  };
+
+  struct PhysicalDeviceMaintenance3Properties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMaintenance3Properties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
+    void* pNext = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
+  {
+    using Type = PhysicalDeviceMaintenance3Properties;
+  };
+  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+
+  struct PhysicalDeviceMemoryBudgetPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {}, std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {}) VULKAN_HPP_NOEXCEPT
+    : heapBudget( heapBudget_ ), heapUsage( heapUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( heapBudget == rhs.heapBudget )
+          && ( heapUsage == rhs.heapUsage );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
+  };
+
+  struct PhysicalDeviceMemoryPriorityFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}) VULKAN_HPP_NOEXCEPT
+    : memoryPriority( memoryPriority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryPriority = memoryPriority_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryPriority == rhs.memoryPriority );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
+  };
+
+  struct PhysicalDeviceMeshShaderFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}) VULKAN_HPP_NOEXCEPT
+    : taskShader( taskShader_ ), meshShader( meshShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskShader = taskShader_;
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      meshShader = meshShader_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( taskShader == rhs.taskShader )
+          && ( meshShader == rhs.meshShader );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
+  {
+    using Type = PhysicalDeviceMeshShaderFeaturesNV;
+  };
+
+  struct PhysicalDeviceMeshShaderPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ), maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ), maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ), maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ), maxTaskOutputCount( maxTaskOutputCount_ ), maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ), maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ), maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ), maxMeshOutputVertices( maxMeshOutputVertices_ ), maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ), maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ), meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ), meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
+          && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
+          && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
+          && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
+          && ( maxTaskOutputCount == rhs.maxTaskOutputCount )
+          && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
+          && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
+          && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
+          && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
+          && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
+          && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
+          && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
+          && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+    void* pNext = {};
+    uint32_t maxDrawMeshTasksCount = {};
+    uint32_t maxTaskWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
+    uint32_t maxTaskTotalMemorySize = {};
+    uint32_t maxTaskOutputCount = {};
+    uint32_t maxMeshWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
+    uint32_t maxMeshTotalMemorySize = {};
+    uint32_t maxMeshOutputVertices = {};
+    uint32_t maxMeshOutputPrimitives = {};
+    uint32_t maxMeshMultiviewViewCount = {};
+    uint32_t meshOutputPerVertexGranularity = {};
+    uint32_t meshOutputPerPrimitiveGranularity = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
+  {
+    using Type = PhysicalDeviceMeshShaderPropertiesNV;
+  };
+
+  struct PhysicalDeviceMultiviewFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}) VULKAN_HPP_NOEXCEPT
+    : multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multiview == rhs.multiview )
+          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
+  {
+    using Type = PhysicalDeviceMultiviewFeatures;
+  };
+  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
+
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}) VULKAN_HPP_NOEXCEPT
+    : perViewPositionAllComponents( perViewPositionAllComponents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
+  {
+    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+  };
+
+  struct PhysicalDeviceMultiviewProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceMultiviewProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
+    void* pNext = {};
+    uint32_t maxMultiviewViewCount = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
+  {
+    using Type = PhysicalDeviceMultiviewProperties;
+  };
+  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+
+  struct PhysicalDevicePCIBusInfoPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}) VULKAN_HPP_NOEXCEPT
+    : pciDomain( pciDomain_ ), pciBus( pciBus_ ), pciDevice( pciDevice_ ), pciFunction( pciFunction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pciDomain == rhs.pciDomain )
+          && ( pciBus == rhs.pciBus )
+          && ( pciDevice == rhs.pciDevice )
+          && ( pciFunction == rhs.pciFunction );
+    }
+
+    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+    void* pNext = {};
+    uint32_t pciDomain = {};
+    uint32_t pciBus = {};
+    uint32_t pciDevice = {};
+    uint32_t pciFunction = {};
+
+  };
+  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
+  {
+    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
+  };
+
+  struct PhysicalDevicePerformanceQueryFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}) VULKAN_HPP_NOEXCEPT
+    : performanceCounterQueryPools( performanceCounterQueryPools_ ), performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) );
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCounterQueryPools = performanceCounterQueryPools_;
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
+          && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
+
+  };
+  static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
+  {
+    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
+  };
+
+  struct PhysicalDevicePerformanceQueryPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}) VULKAN_HPP_NOEXCEPT
+    : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
+
+  };
+  static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
+  {
+    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
+  };
+
+  struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipelineCreationCacheControl( pipelineCreationCacheControl_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCreationCacheControl = pipelineCreationCacheControl_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
+    }
+
+    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
+
+  };
+  static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePipelineCreationCacheControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT>
+  {
+    using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+  };
+
+  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : pipelineExecutableInfo( pipelineExecutableInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) );
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineExecutableInfo = pipelineExecutableInfo_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
+    }
+
+    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
+
+  };
+  static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
+  {
+    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
   };
 
   struct PhysicalDevicePointClippingProperties
   {
-    operator VkPhysicalDevicePointClippingProperties const&() const
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes) VULKAN_HPP_NOEXCEPT
+    : pointClippingBehavior( pointClippingBehavior_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
+      return *this;
     }
 
-    operator VkPhysicalDevicePointClippingProperties &()
+    PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePointClippingProperties ) );
+      return *this;
     }
 
-    bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const
+
+    operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( pointClippingBehavior == rhs.pointClippingBehavior );
     }
 
-    bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const
+    bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+
 
   public:
-    void* pNext = nullptr;
-    PointClippingBehavior pointClippingBehavior;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+
   };
   static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
 
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
+  {
+    using Type = PhysicalDevicePointClippingProperties;
+  };
   using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
 
-  enum class SamplerReductionModeEXT
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct PhysicalDevicePortabilitySubsetFeaturesKHR
   {
-    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
-    eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT,
-    eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT
-  };
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
 
-  struct SamplerReductionModeCreateInfoEXT
-  {
-    SamplerReductionModeCreateInfoEXT( SamplerReductionModeEXT reductionMode_ = SamplerReductionModeEXT::eWeightedAverage )
-      : reductionMode( reductionMode_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}) VULKAN_HPP_NOEXCEPT
+    : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ), events( events_ ), imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ), imageViewFormatSwizzle( imageViewFormatSwizzle_ ), imageView2DOn3DImage( imageView2DOn3DImage_ ), multisampleArrayImage( multisampleArrayImage_ ), mutableComparisonSamplers( mutableComparisonSamplers_ ), pointPolygons( pointPolygons_ ), samplerMipLodBias( samplerMipLodBias_ ), separateStencilMaskRef( separateStencilMaskRef_ ), shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ), tessellationIsolines( tessellationIsolines_ ), tessellationPointMode( tessellationPointMode_ ), triangleFans( triangleFans_ ), vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs )
+    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) );
-    }
-
-    SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
       return *this;
     }
-    SamplerReductionModeCreateInfoEXT& setPNext( const void* pNext_ )
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) );
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    SamplerReductionModeCreateInfoEXT& setReductionMode( SamplerReductionModeEXT reductionMode_ )
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
     {
-      reductionMode = reductionMode_;
+      constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
       return *this;
     }
 
-    operator VkSamplerReductionModeCreateInfoEXT const&() const
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT*>(this);
+      events = events_;
+      return *this;
     }
 
-    operator VkSamplerReductionModeCreateInfoEXT &()
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSamplerReductionModeCreateInfoEXT*>(this);
+      imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
+      return *this;
     }
 
-    bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewFormatSwizzle = imageViewFormatSwizzle_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView2DOn3DImage = imageView2DOn3DImage_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multisampleArrayImage = multisampleArrayImage_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableComparisonSamplers = mutableComparisonSamplers_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pointPolygons = pointPolygons_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMipLodBias = samplerMipLodBias_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateStencilMaskRef = separateStencilMaskRef_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationIsolines = tessellationIsolines_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationPointMode = tessellationPointMode_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangleFans = triangleFans_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( reductionMode == rhs.reductionMode );
+          && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors )
+          && ( events == rhs.events )
+          && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation )
+          && ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle )
+          && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage )
+          && ( multisampleArrayImage == rhs.multisampleArrayImage )
+          && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers )
+          && ( pointPolygons == rhs.pointPolygons )
+          && ( samplerMipLodBias == rhs.samplerMipLodBias )
+          && ( separateStencilMaskRef == rhs.separateStencilMaskRef )
+          && ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions )
+          && ( tessellationIsolines == rhs.tessellationIsolines )
+          && ( tessellationPointMode == rhs.tessellationPointMode )
+          && ( triangleFans == rhs.triangleFans )
+          && ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
     }
 
-    bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const
+    bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT;
+
 
   public:
-    const void* pNext = nullptr;
-    SamplerReductionModeEXT reductionMode;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 events = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
+
   };
-  static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
 
-  enum class TessellationDomainOrigin
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
   {
-    eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
-    eUpperLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
-    eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
-    eLowerLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
+    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
   };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
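A hedged usage sketch (not generated from the registry and not part of this patch): assuming VK_ENABLE_BETA_EXTENSIONS is defined, the device advertises VK_KHR_portability_subset, and the default dispatcher is initialized, the portability subset features above can be read through a structure chain before relying on optional functionality such as triangle fans. `physicalDevice` below is a hypothetical, already created vk::PhysicalDevice.

  // Sketch only, under the assumptions stated above.
  bool supportsTriangleFans( vk::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDevicePortabilitySubsetFeaturesKHR>();
    // every struct linked into the chain is filled by the single getFeatures2 call
    return chain.get<vk::PhysicalDevicePortabilitySubsetFeaturesKHR>().triangleFans == VK_TRUE;
  }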
 
-  struct PipelineTessellationDomainOriginStateCreateInfo
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct PhysicalDevicePortabilitySubsetPropertiesKHR
   {
-    PipelineTessellationDomainOriginStateCreateInfo( TessellationDomainOrigin domainOrigin_ = TessellationDomainOrigin::eUpperLeft )
-      : domainOrigin( domainOrigin_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
 
-    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+    : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
+    {}
 
-    PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
       return *this;
     }
-    PipelineTessellationDomainOriginStateCreateInfo& setPNext( const void* pNext_ )
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) );
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineTessellationDomainOriginStateCreateInfo& setDomainOrigin( TessellationDomainOrigin domainOrigin_ )
+    PhysicalDevicePortabilitySubsetPropertiesKHR & setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
     {
-      domainOrigin = domainOrigin_;
+      minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
       return *this;
     }
 
-    operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const
+
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>(this);
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
     }
 
-    operator VkPipelineTessellationDomainOriginStateCreateInfo &()
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>(this);
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
     }
 
-    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( domainOrigin == rhs.domainOrigin );
+          && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
     }
 
-    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const
+    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    TessellationDomainOrigin domainOrigin;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+    void* pNext = {};
+    uint32_t minVertexInputBindingStrideAlignment = {};
+
   };
-  static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
 
-  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
-
-  enum class SamplerYcbcrModelConversion
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
   {
-    eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
-    eRgbIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
-    eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
-    eYcbcrIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
-    eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
-    eYcbcr709KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
-    eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
-    eYcbcr601KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
-    eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
-    eYcbcr2020KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
+    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
   };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
-  enum class SamplerYcbcrRange
+  struct PhysicalDevicePrivateDataFeaturesEXT
   {
-    eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
-    eItuFullKHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
-    eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
-    eItuNarrowKHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
-  };
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
 
-  enum class ChromaLocation
-  {
-    eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
-    eCositedEvenKHR = VK_CHROMA_LOCATION_COSITED_EVEN,
-    eMidpoint = VK_CHROMA_LOCATION_MIDPOINT,
-    eMidpointKHR = VK_CHROMA_LOCATION_MIDPOINT
-  };
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}) VULKAN_HPP_NOEXCEPT
+    : privateData( privateData_ )
+    {}
 
-  struct SamplerYcbcrConversionCreateInfo
-  {
-    SamplerYcbcrConversionCreateInfo( Format format_ = Format::eUndefined,
-                                      SamplerYcbcrModelConversion ycbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity,
-                                      SamplerYcbcrRange ycbcrRange_ = SamplerYcbcrRange::eItuFull,
-                                      ComponentMapping components_ = ComponentMapping(),
-                                      ChromaLocation xChromaOffset_ = ChromaLocation::eCositedEven,
-                                      ChromaLocation yChromaOffset_ = ChromaLocation::eCositedEven,
-                                      Filter chromaFilter_ = Filter::eNearest,
-                                      Bool32 forceExplicitReconstruction_ = 0 )
-      : format( format_ )
-      , ycbcrModel( ycbcrModel_ )
-      , ycbcrRange( ycbcrRange_ )
-      , components( components_ )
-      , xChromaOffset( xChromaOffset_ )
-      , yChromaOffset( yChromaOffset_ )
-      , chromaFilter( chromaFilter_ )
-      , forceExplicitReconstruction( forceExplicitReconstruction_ )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs )
+    PhysicalDevicePrivateDataFeaturesEXT & operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) );
-    }
-
-    SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs );
       return *this;
     }
-    SamplerYcbcrConversionCreateInfo& setPNext( const void* pNext_ )
+
+    PhysicalDevicePrivateDataFeaturesEXT & operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePrivateDataFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    SamplerYcbcrConversionCreateInfo& setFormat( Format format_ )
+    PhysicalDevicePrivateDataFeaturesEXT & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
     {
-      format = format_;
+      privateData = privateData_;
       return *this;
     }
 
-    SamplerYcbcrConversionCreateInfo& setYcbcrModel( SamplerYcbcrModelConversion ycbcrModel_ )
+
+    operator VkPhysicalDevicePrivateDataFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      ycbcrModel = ycbcrModel_;
-      return *this;
+      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
     }
 
-    SamplerYcbcrConversionCreateInfo& setYcbcrRange( SamplerYcbcrRange ycbcrRange_ )
+    operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
     {
-      ycbcrRange = ycbcrRange_;
-      return *this;
+      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
     }
 
-    SamplerYcbcrConversionCreateInfo& setComponents( ComponentMapping components_ )
-    {
-      components = components_;
-      return *this;
-    }
 
-    SamplerYcbcrConversionCreateInfo& setXChromaOffset( ChromaLocation xChromaOffset_ )
-    {
-      xChromaOffset = xChromaOffset_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo& setYChromaOffset( ChromaLocation yChromaOffset_ )
-    {
-      yChromaOffset = yChromaOffset_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo& setChromaFilter( Filter chromaFilter_ )
-    {
-      chromaFilter = chromaFilter_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo& setForceExplicitReconstruction( Bool32 forceExplicitReconstruction_ )
-    {
-      forceExplicitReconstruction = forceExplicitReconstruction_;
-      return *this;
-    }
-
-    operator VkSamplerYcbcrConversionCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>(this);
-    }
-
-    operator VkSamplerYcbcrConversionCreateInfo &()
-    {
-      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>(this);
-    }
-
-    bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( format == rhs.format )
-          && ( ycbcrModel == rhs.ycbcrModel )
-          && ( ycbcrRange == rhs.ycbcrRange )
-          && ( components == rhs.components )
-          && ( xChromaOffset == rhs.xChromaOffset )
-          && ( yChromaOffset == rhs.yChromaOffset )
-          && ( chromaFilter == rhs.chromaFilter )
-          && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+          && ( privateData == rhs.privateData );
     }
 
-    bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const
+    bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+
 
   public:
-    const void* pNext = nullptr;
-    Format format;
-    SamplerYcbcrModelConversion ycbcrModel;
-    SamplerYcbcrRange ycbcrRange;
-    ComponentMapping components;
-    ChromaLocation xChromaOffset;
-    ChromaLocation yChromaOffset;
-    Filter chromaFilter;
-    Bool32 forceExplicitReconstruction;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+
   };
-  static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
 
-  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  struct AndroidHardwareBufferFormatPropertiesANDROID
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeaturesEXT>
   {
-    operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const
+    using Type = PhysicalDevicePrivateDataFeaturesEXT;
+  };
+
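A hedged sketch of opting in to the private data feature at device creation; `queueCreateInfo` is a hypothetical, already filled vk::DeviceQueueCreateInfo, and the device is assumed to advertise VK_EXT_private_data (enabled extension names are omitted for brevity).

  // Sketch only; the feature struct is chained into DeviceCreateInfo::pNext by hand.
  vk::Device createDeviceWithPrivateData( vk::PhysicalDevice                physicalDevice,
                                          vk::DeviceQueueCreateInfo const & queueCreateInfo )
  {
    vk::PhysicalDevicePrivateDataFeaturesEXT privateDataFeatures;
    privateDataFeatures.setPrivateData( VK_TRUE );

    vk::DeviceCreateInfo deviceCreateInfo;
    deviceCreateInfo.setQueueCreateInfoCount( 1 )
                    .setPQueueCreateInfos( &queueCreateInfo )
                    .setPNext( &privateDataFeatures );  // opt in to VK_EXT_private_data
    return physicalDevice.createDevice( deviceCreateInfo );
  }
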
+  struct PhysicalDeviceProtectedMemoryFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}) VULKAN_HPP_NOEXCEPT
+    : protectedMemory( protectedMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>(this);
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
+      return *this;
     }
 
-    operator VkAndroidHardwareBufferFormatPropertiesANDROID &()
+    PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>(this);
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) );
+      return *this;
     }
 
-    bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const
+    PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( format == rhs.format )
-          && ( externalFormat == rhs.externalFormat )
-          && ( formatFeatures == rhs.formatFeatures )
-          && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
-          && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
-          && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
-          && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
-          && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+          && ( protectedMemory == rhs.protectedMemory );
     }
 
-    bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const
+    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+
 
   public:
-    void* pNext = nullptr;
-    Format format;
-    uint64_t externalFormat;
-    FormatFeatureFlags formatFeatures;
-    ComponentMapping samplerYcbcrConversionComponents;
-    SamplerYcbcrModelConversion suggestedYcbcrModel;
-    SamplerYcbcrRange suggestedYcbcrRange;
-    ChromaLocation suggestedXChromaOffset;
-    ChromaLocation suggestedYChromaOffset;
-  };
-  static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
 
-  enum class BlendOverlapEXT
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
   {
-    eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
-    eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
-    eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
+    using Type = PhysicalDeviceProtectedMemoryFeatures;
+  };
+
+  struct PhysicalDeviceProtectedMemoryProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}) VULKAN_HPP_NOEXCEPT
+    : protectedNoFault( protectedNoFault_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedNoFault == rhs.protectedNoFault );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
+  {
+    using Type = PhysicalDeviceProtectedMemoryProperties;
+  };
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(uint32_t maxPushDescriptors_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxPushDescriptors( maxPushDescriptors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPushDescriptors == rhs.maxPushDescriptors );
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    void* pNext = {};
+    uint32_t maxPushDescriptors = {};
+
+  };
+  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
+  {
+    using Type = PhysicalDevicePushDescriptorPropertiesKHR;
+  };
+
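A hedged sketch of reading the push descriptor limit exposed above; `physicalDevice` is again a hypothetical vk::PhysicalDevice, and VK_KHR_push_descriptor is assumed to be supported.

  // Sketch only; getProperties2 fills every struct linked into the chain.
  uint32_t queryMaxPushDescriptors( vk::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                               vk::PhysicalDevicePushDescriptorPropertiesKHR>();
    return chain.get<vk::PhysicalDevicePushDescriptorPropertiesKHR>().maxPushDescriptors;
  }
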
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct PhysicalDeviceRayTracingFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ = {}) VULKAN_HPP_NOEXCEPT
+    : rayTracing( rayTracing_ ), rayTracingShaderGroupHandleCaptureReplay( rayTracingShaderGroupHandleCaptureReplay_ ), rayTracingShaderGroupHandleCaptureReplayMixed( rayTracingShaderGroupHandleCaptureReplayMixed_ ), rayTracingAccelerationStructureCaptureReplay( rayTracingAccelerationStructureCaptureReplay_ ), rayTracingIndirectTraceRays( rayTracingIndirectTraceRays_ ), rayTracingIndirectAccelerationStructureBuild( rayTracingIndirectAccelerationStructureBuild_ ), rayTracingHostAccelerationStructureCommands( rayTracingHostAccelerationStructureCommands_ ), rayQuery( rayQuery_ ), rayTracingPrimitiveCulling( rayTracingPrimitiveCulling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingFeaturesKHR( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceRayTracingFeaturesKHR & operator=( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & operator=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRayTracingFeaturesKHR ) );
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracing( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracing = rayTracing_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingShaderGroupHandleCaptureReplay = rayTracingShaderGroupHandleCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingShaderGroupHandleCaptureReplayMixed = rayTracingShaderGroupHandleCaptureReplayMixed_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingAccelerationStructureCaptureReplay = rayTracingAccelerationStructureCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectTraceRays( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingIndirectTraceRays = rayTracingIndirectTraceRays_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectAccelerationStructureBuild( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingIndirectAccelerationStructureBuild = rayTracingIndirectAccelerationStructureBuild_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingHostAccelerationStructureCommands( VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingHostAccelerationStructureCommands = rayTracingHostAccelerationStructureCommands_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayQuery = rayQuery_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPrimitiveCulling = rayTracingPrimitiveCulling_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRayTracingFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingFeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRayTracingFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rayTracing == rhs.rayTracing )
+          && ( rayTracingShaderGroupHandleCaptureReplay == rhs.rayTracingShaderGroupHandleCaptureReplay )
+          && ( rayTracingShaderGroupHandleCaptureReplayMixed == rhs.rayTracingShaderGroupHandleCaptureReplayMixed )
+          && ( rayTracingAccelerationStructureCaptureReplay == rhs.rayTracingAccelerationStructureCaptureReplay )
+          && ( rayTracingIndirectTraceRays == rhs.rayTracingIndirectTraceRays )
+          && ( rayTracingIndirectAccelerationStructureBuild == rhs.rayTracingIndirectAccelerationStructureBuild )
+          && ( rayTracingHostAccelerationStructureCommands == rhs.rayTracingHostAccelerationStructureCommands )
+          && ( rayQuery == rhs.rayQuery )
+          && ( rayTracingPrimitiveCulling == rhs.rayTracingPrimitiveCulling );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingFeaturesKHR>
+  {
+    using Type = PhysicalDeviceRayTracingFeaturesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
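A hedged sketch of testing for ray query support via manual pNext chaining rather than a structure chain; it assumes VK_ENABLE_BETA_EXTENSIONS is defined and the provisional VK_KHR_ray_tracing extension is advertised by the device.

  // Sketch only; the feature struct is linked by hand into PhysicalDeviceFeatures2::pNext.
  bool supportsRayQuery( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceRayTracingFeaturesKHR rayTracingFeatures;
    vk::PhysicalDeviceFeatures2             features2;
    features2.setPNext( &rayTracingFeatures );
    physicalDevice.getFeatures2( &features2 );   // fills both structs
    return rayTracingFeatures.rayQuery == VK_TRUE;
  }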
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct PhysicalDeviceRayTracingPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxPrimitiveCount( maxPrimitiveCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ), shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPropertiesKHR( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceRayTracingPropertiesKHR & operator=( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPropertiesKHR & operator=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRayTracingPropertiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRayTracingPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRayTracingPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( maxInstanceCount == rhs.maxInstanceCount )
+          && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
+          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
+          && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR;
+    void* pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxPrimitiveCount = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+    uint32_t shaderGroupHandleCaptureReplaySize = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesKHR>
+  {
+    using Type = PhysicalDeviceRayTracingPropertiesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct PhysicalDeviceRayTracingPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxTriangleCount( maxTriangleCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRayTracingPropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( maxInstanceCount == rhs.maxInstanceCount )
+          && ( maxTriangleCount == rhs.maxTriangleCount )
+          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+    void* pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxTriangleCount = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
+  {
+    using Type = PhysicalDeviceRayTracingPropertiesNV;
+  };
+
+  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}) VULKAN_HPP_NOEXCEPT
+    : representativeFragmentTest( representativeFragmentTest_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTest = representativeFragmentTest_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( representativeFragmentTest == rhs.representativeFragmentTest );
+    }
+
+    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
+  {
+    using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+  };
+
+  struct PhysicalDeviceRobustness2FeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
+    : robustBufferAccess2( robustBufferAccess2_ ), robustImageAccess2( robustImageAccess2_ ), nullDescriptor( nullDescriptor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRobustness2FeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess2 = robustBufferAccess2_;
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustImageAccess2 = robustImageAccess2_;
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      nullDescriptor = nullDescriptor_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRobustness2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustBufferAccess2 == rhs.robustBufferAccess2 )
+          && ( robustImageAccess2 == rhs.robustImageAccess2 )
+          && ( nullDescriptor == rhs.nullDescriptor );
+    }
+
+    bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceRobustness2FeaturesEXT;
+  };
+
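+  // A minimal usage sketch (illustrative only, not part of the generated registry output):
+  // support for VK_EXT_robustness2 is typically queried by chaining this struct into a
+  // vk::PhysicalDeviceFeatures2 query, assuming `physicalDevice` is a valid vk::PhysicalDevice
+  // with Vulkan 1.1 available.
+  //
+  //   vk::PhysicalDeviceRobustness2FeaturesEXT robustness2Features;
+  //   vk::PhysicalDeviceFeatures2 features2;
+  //   features2.pNext = &robustness2Features;
+  //   physicalDevice.getFeatures2( &features2 );
+  //   bool hasNullDescriptor = ( robustness2Features.nullDescriptor == VK_TRUE );
+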
+  struct PhysicalDeviceRobustness2PropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+    : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ), robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceRobustness2PropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
+          && ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
+  {
+    using Type = PhysicalDeviceRobustness2PropertiesEXT;
+  };
+
+  struct PhysicalDeviceSampleLocationsPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array<float,2> const& sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+    : sampleLocationSampleCounts( sampleLocationSampleCounts_ ), maxSampleLocationGridSize( maxSampleLocationGridSize_ ), sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ), sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ), variableSampleLocations( variableSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
+          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
+          && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange )
+          && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
+          && ( variableSampleLocations == rhs.variableSampleLocations );
+    }
+
+    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
+    uint32_t sampleLocationSubPixelBits = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
+  {
+    using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
+  };
+
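+  // A minimal usage sketch, assuming a valid vk::PhysicalDevice `physicalDevice`: the
+  // sample-location limits above are read back through a vk::PhysicalDeviceProperties2
+  // pNext chain rather than set by the application.
+  //
+  //   vk::PhysicalDeviceSampleLocationsPropertiesEXT sampleLocationProps;
+  //   vk::PhysicalDeviceProperties2 props2;
+  //   props2.pNext = &sampleLocationProps;
+  //   physicalDevice.getProperties2( &props2 );
+  //   vk::Extent2D maxGrid = sampleLocationProps.maxSampleLocationGridSize;
+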
+  struct PhysicalDeviceSamplerFilterMinmaxProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}) VULKAN_HPP_NOEXCEPT
+    : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
+    }
+
+    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
+  {
+    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
+  };
+  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
+
+  struct PhysicalDeviceSamplerYcbcrConversionFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}) VULKAN_HPP_NOEXCEPT
+    : samplerYcbcrConversion( samplerYcbcrConversion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
+    }
+
+    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
+  {
+    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
+  };
+  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
+
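+  // A small illustrative sketch of the conversion operators defined above: the C++ wrapper
+  // converts implicitly to the corresponding C struct, so it can be handed to plain Vulkan
+  // C code unchanged.
+  //
+  //   vk::PhysicalDeviceSamplerYcbcrConversionFeatures ycbcrFeatures;
+  //   VkPhysicalDeviceSamplerYcbcrConversionFeatures cFeatures = ycbcrFeatures;  // via operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&()
+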
+  struct PhysicalDeviceScalarBlockLayoutFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}) VULKAN_HPP_NOEXCEPT
+    : scalarBlockLayout( scalarBlockLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scalarBlockLayout = scalarBlockLayout_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceScalarBlockLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( scalarBlockLayout == rhs.scalarBlockLayout );
+    }
+
+    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
+  {
+    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
+  };
+  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
+
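+  // In enhanced mode the same feature query can use the StructureChain helpers instead of
+  // wiring pNext by hand; an illustrative sketch assuming a valid vk::PhysicalDevice
+  // `physicalDevice`:
+  //
+  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                            vk::PhysicalDeviceScalarBlockLayoutFeatures>();
+  //   bool scalarLayout = chain.get<vk::PhysicalDeviceScalarBlockLayoutFeatures>().scalarBlockLayout;
+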
+  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}) VULKAN_HPP_NOEXCEPT
+    : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateDepthStencilLayouts = separateDepthStencilLayouts_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
+    }
+
+    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
+  {
+    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+  };
+  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+  struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ), shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ), shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ), shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ), shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ), shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ), shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ), shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ), shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ), shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ), sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ), sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics )
+          && ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd )
+          && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics )
+          && ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd )
+          && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics )
+          && ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd )
+          && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics )
+          && ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd )
+          && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics )
+          && ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd )
+          && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics )
+          && ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+  };
+
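+  // The setters above return *this, so the requested atomic-float features can be configured
+  // fluently and the struct appended to a vk::DeviceCreateInfo pNext chain when creating the
+  // device; an illustrative sketch only (queue and extension setup omitted):
+  //
+  //   auto atomicFloatFeatures = vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT()
+  //                                .setShaderBufferFloat32Atomics( VK_TRUE )
+  //                                .setShaderBufferFloat32AtomicAdd( VK_TRUE );
+  //   vk::DeviceCreateInfo deviceCreateInfo;
+  //   deviceCreateInfo.setPNext( &atomicFloatFeatures );
+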
+  struct PhysicalDeviceShaderAtomicInt64Features
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderAtomicInt64Features ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderAtomicInt64Features const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
+          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
+  {
+    using Type = PhysicalDeviceShaderAtomicInt64Features;
+  };
+  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+
+  struct PhysicalDeviceShaderClockFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderSubgroupClock( shaderSubgroupClock_ ), shaderDeviceClock( shaderDeviceClock_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderClockFeaturesKHR ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupClock = shaderSubgroupClock_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDeviceClock = shaderDeviceClock_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSubgroupClock == rhs.shaderSubgroupClock )
+          && ( shaderDeviceClock == rhs.shaderDeviceClock );
+    }
+
+    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderClockFeaturesKHR;
+  };
+
+  struct PhysicalDeviceShaderCoreProperties2AMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderCoreFeatures( shaderCoreFeatures_ ), activeComputeUnitCount( activeComputeUnitCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderCoreProperties2AMD ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderCoreFeatures == rhs.shaderCoreFeatures )
+          && ( activeComputeUnitCount == rhs.activeComputeUnitCount );
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
+    uint32_t activeComputeUnitCount = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
+  {
+    using Type = PhysicalDeviceShaderCoreProperties2AMD;
+  };
+
+  struct PhysicalDeviceShaderCorePropertiesAMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderEngineCount( shaderEngineCount_ ), shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ), computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ), simdPerComputeUnit( simdPerComputeUnit_ ), wavefrontsPerSimd( wavefrontsPerSimd_ ), wavefrontSize( wavefrontSize_ ), sgprsPerSimd( sgprsPerSimd_ ), minSgprAllocation( minSgprAllocation_ ), maxSgprAllocation( maxSgprAllocation_ ), sgprAllocationGranularity( sgprAllocationGranularity_ ), vgprsPerSimd( vgprsPerSimd_ ), minVgprAllocation( minVgprAllocation_ ), maxVgprAllocation( maxVgprAllocation_ ), vgprAllocationGranularity( vgprAllocationGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderCorePropertiesAMD ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderEngineCount == rhs.shaderEngineCount )
+          && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
+          && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
+          && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
+          && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
+          && ( wavefrontSize == rhs.wavefrontSize )
+          && ( sgprsPerSimd == rhs.sgprsPerSimd )
+          && ( minSgprAllocation == rhs.minSgprAllocation )
+          && ( maxSgprAllocation == rhs.maxSgprAllocation )
+          && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
+          && ( vgprsPerSimd == rhs.vgprsPerSimd )
+          && ( minVgprAllocation == rhs.minVgprAllocation )
+          && ( maxVgprAllocation == rhs.maxVgprAllocation )
+          && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
+    }
+
+    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+    void* pNext = {};
+    uint32_t shaderEngineCount = {};
+    uint32_t shaderArraysPerEngineCount = {};
+    uint32_t computeUnitsPerShaderArray = {};
+    uint32_t simdPerComputeUnit = {};
+    uint32_t wavefrontsPerSimd = {};
+    uint32_t wavefrontSize = {};
+    uint32_t sgprsPerSimd = {};
+    uint32_t minSgprAllocation = {};
+    uint32_t maxSgprAllocation = {};
+    uint32_t sgprAllocationGranularity = {};
+    uint32_t vgprsPerSimd = {};
+    uint32_t minVgprAllocation = {};
+    uint32_t maxVgprAllocation = {};
+    uint32_t vgprAllocationGranularity = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
+  {
+    using Type = PhysicalDeviceShaderCorePropertiesAMD;
+  };
+
+  struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
+    }
+
+    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderDrawParametersFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderDrawParameters( shaderDrawParameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderDrawParametersFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderDrawParametersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDrawParameters = shaderDrawParameters_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderDrawParameters == rhs.shaderDrawParameters );
+    }
+
+    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
+  {
+    using Type = PhysicalDeviceShaderDrawParametersFeatures;
+  };
+  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
+
+  struct PhysicalDeviceShaderFloat16Int8Features
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderFloat16Int8Features ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16 = shaderFloat16_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt8 = shaderInt8_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderFloat16Int8Features const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderFloat16 == rhs.shaderFloat16 )
+          && ( shaderInt8 == rhs.shaderInt8 );
+    }
+
+    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
+  {
+    using Type = PhysicalDeviceShaderFloat16Int8Features;
+  };
+  using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+
+  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderImageInt64Atomics( shaderImageInt64Atomics_ ), sparseImageInt64Atomics( sparseImageInt64Atomics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageInt64Atomics = shaderImageInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageInt64Atomics = sparseImageInt64Atomics_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics )
+          && ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderImageFootprintFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}) VULKAN_HPP_NOEXCEPT
+    : imageFootprint( imageFootprint_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFootprint = imageFootprint_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFootprint == rhs.imageFootprint );
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
+  };
+
+  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
+  {
+    using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderSMBuiltins( shaderSMBuiltins_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSMBuiltins = shaderSMBuiltins_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderSMCount( shaderSMCount_ ), shaderWarpsPerSM( shaderWarpsPerSM_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSMCount == rhs.shaderSMCount )
+          && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+    void* pNext = {};
+    uint32_t shaderSMCount = {};
+    uint32_t shaderWarpsPerSM = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
+  {
+    using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+  };
+
+  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
+  {
+    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  };
+  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+  struct PhysicalDeviceShaderTerminateInvocationFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderTerminateInvocation( shaderTerminateInvocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR & operator=( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR & operator=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShaderTerminateInvocationFeaturesKHR ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTerminateInvocation = shaderTerminateInvocation_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
+    }
+
+    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderTerminateInvocationFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderTerminateInvocationFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+  };
+
+  struct PhysicalDeviceShadingRateImageFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRateImage( shadingRateImage_ ), shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) );
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImage = shadingRateImage_;
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateImage == rhs.shadingRateImage )
+          && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
+  };
+
+  struct PhysicalDeviceShadingRateImagePropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRateTexelSize( shadingRateTexelSize_ ), shadingRatePaletteSize( shadingRatePaletteSize_ ), shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateTexelSize == rhs.shadingRateTexelSize )
+          && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
+          && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
+    uint32_t shadingRatePaletteSize = {};
+    uint32_t shadingRateMaxCoarseSamples = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
+  };
+
+  struct PhysicalDeviceSubgroupProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}) VULKAN_HPP_NOEXCEPT
+    : subgroupSize( subgroupSize_ ), supportedStages( supportedStages_ ), supportedOperations( supportedOperations_ ), quadOperationsInAllStages( quadOperationsInAllStages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSubgroupProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subgroupSize == rhs.subgroupSize )
+          && ( supportedStages == rhs.supportedStages )
+          && ( supportedOperations == rhs.supportedOperations )
+          && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+    void* pNext = {};
+    uint32_t subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
+  {
+    using Type = PhysicalDeviceSubgroupProperties;
+  };
+
+  struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}) VULKAN_HPP_NOEXCEPT
+    : subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupSizeControl = subgroupSizeControl_;
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeFullSubgroups = computeFullSubgroups_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subgroupSizeControl == rhs.subgroupSizeControl )
+          && ( computeFullSubgroups == rhs.computeFullSubgroups );
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT>
+  {
+    using Type = PhysicalDeviceSubgroupSizeControlFeaturesEXT;
+  };
+
+  struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}) VULKAN_HPP_NOEXCEPT
+    : minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minSubgroupSize == rhs.minSubgroupSize )
+          && ( maxSubgroupSize == rhs.maxSubgroupSize )
+          && ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
+          && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
+    void* pNext = {};
+    uint32_t minSubgroupSize = {};
+    uint32_t maxSubgroupSize = {};
+    uint32_t maxComputeWorkgroupSubgroups = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT>
+  {
+    using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT;
+  };
+
+  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+    : texelBufferAlignment( texelBufferAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      texelBufferAlignment = texelBufferAlignment_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( texelBufferAlignment == rhs.texelBufferAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
+  {
+    using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+  };
+
+  struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}) VULKAN_HPP_NOEXCEPT
+    : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
+          && ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
+          && ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
+          && ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT>
+  {
+    using Type = PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+  };
+
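  // Illustrative sketch only (not part of the generated header): the conversion
  // operators defined above let this wrapper be handed straight to the C API.
  // Assumes the default "vk" namespace, a valid VkPhysicalDevice handle named
  // vkPhysicalDevice, a Vulkan 1.1+ instance, and that the device advertises
  // VK_EXT_texel_buffer_alignment; error handling is omitted.
  //
  //   vk::PhysicalDeviceTexelBufferAlignmentPropertiesEXT alignmentProps;  // sType is pre-set by the wrapper
  //   VkPhysicalDeviceProperties2 props2{};
  //   props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  //   props2.pNext = &static_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &>( alignmentProps );
  //   vkGetPhysicalDeviceProperties2( vkPhysicalDevice, &props2 );
  //   // alignmentProps.storageTexelBufferOffsetAlignmentBytes etc. are now filled in
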
+  struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}) VULKAN_HPP_NOEXCEPT
+    : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
+    }
+
+    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT>
+  {
+    using Type = PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+  };
+
+  struct PhysicalDeviceTimelineSemaphoreFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}) VULKAN_HPP_NOEXCEPT
+    : timelineSemaphore( timelineSemaphore_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timelineSemaphore = timelineSemaphore_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTimelineSemaphoreFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( timelineSemaphore == rhs.timelineSemaphore );
+    }
+
+    bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
+  {
+    using Type = PhysicalDeviceTimelineSemaphoreFeatures;
+  };
+  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
+
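  // Illustrative sketch only: querying whether timeline semaphores are supported,
  // using the wrapper above and the default "vk" namespace. Assumes a Vulkan 1.2
  // capable vk::PhysicalDevice named physicalDevice.
  //
  //   vk::PhysicalDeviceTimelineSemaphoreFeatures timelineFeatures;
  //   vk::PhysicalDeviceFeatures2 features2;
  //   features2.pNext = &timelineFeatures;          // chain the feature struct
  //   physicalDevice.getFeatures2( &features2 );    // fills features2 and timelineFeatures
  //   if ( timelineFeatures.timelineSemaphore )
  //   {
  //     // the same struct can be chained into vk::DeviceCreateInfo::pNext to enable it
  //   }
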
+  struct PhysicalDeviceTimelineSemaphoreProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTimelineSemaphoreProperties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTimelineSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
+    }
+
+    bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+    void* pNext = {};
+    uint64_t maxTimelineSemaphoreValueDifference = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
+  {
+    using Type = PhysicalDeviceTimelineSemaphoreProperties;
+  };
+  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
+
+  struct PhysicalDeviceTransformFeedbackFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}) VULKAN_HPP_NOEXCEPT
+    : transformFeedback( transformFeedback_ ), geometryStreams( geometryStreams_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformFeedback = transformFeedback_;
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryStreams = geometryStreams_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transformFeedback == rhs.transformFeedback )
+          && ( geometryStreams == rhs.geometryStreams );
+    }
+
+    bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
+    VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
+  {
+    using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
+  };
+
+  struct PhysicalDeviceTransformFeedbackPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ), maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ), maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ), maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ), maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ), maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ), transformFeedbackQueries( transformFeedbackQueries_ ), transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ), transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ), transformFeedbackDraw( transformFeedbackDraw_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
+          && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
+          && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
+          && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
+          && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
+          && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
+          && ( transformFeedbackQueries == rhs.transformFeedbackQueries )
+          && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
+          && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
+          && ( transformFeedbackDraw == rhs.transformFeedbackDraw );
+    }
+
+    bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+    void* pNext = {};
+    uint32_t maxTransformFeedbackStreams = {};
+    uint32_t maxTransformFeedbackBuffers = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
+    uint32_t maxTransformFeedbackStreamDataSize = {};
+    uint32_t maxTransformFeedbackBufferDataSize = {};
+    uint32_t maxTransformFeedbackBufferDataStride = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
+  {
+    using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
+  };
+
+  struct PhysicalDeviceUniformBufferStandardLayoutFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}) VULKAN_HPP_NOEXCEPT
+    : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformBufferStandardLayout = uniformBufferStandardLayout_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
+    }
+
+    bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
+  {
+    using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+  };
+  using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+  struct PhysicalDeviceVariablePointersFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}) VULKAN_HPP_NOEXCEPT
+    : variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVariablePointersFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+          && ( variablePointers == rhs.variablePointers );
+    }
+
+    bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
+  {
+    using Type = PhysicalDeviceVariablePointersFeatures;
+  };
+  using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+
+  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}) VULKAN_HPP_NOEXCEPT
+    : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ), vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
+          && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+  };
+
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}) VULKAN_HPP_NOEXCEPT
+    : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+    void* pNext = {};
+    uint32_t maxVertexAttribDivisor = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+  };
+
+  struct PhysicalDeviceVulkan11Features
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
+    : storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ), multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ ), variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ ), protectedMemory( protectedMemory_ ), samplerYcbcrConversion( samplerYcbcrConversion_ ), shaderDrawParameters( shaderDrawParameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan11Features ) );
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDrawParameters = shaderDrawParameters_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkan11Features const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVulkan11Features const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+          && ( storagePushConstant16 == rhs.storagePushConstant16 )
+          && ( storageInputOutput16 == rhs.storageInputOutput16 )
+          && ( multiview == rhs.multiview )
+          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+          && ( multiviewTessellationShader == rhs.multiviewTessellationShader )
+          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+          && ( variablePointers == rhs.variablePointers )
+          && ( protectedMemory == rhs.protectedMemory )
+          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion )
+          && ( shaderDrawParameters == rhs.shaderDrawParameters );
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
+  {
+    using Type = PhysicalDeviceVulkan11Features;
+  };
+
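  // Illustrative sketch only: enabling some of the Vulkan 1.1 features above at
  // device-creation time. The setters return *this, so they can be chained.
  // Assumes a valid vk::PhysicalDevice physicalDevice, a populated
  // vk::DeviceQueueCreateInfo queueInfo, and exceptions enabled (this header's
  // default error handling).
  //
  //   vk::PhysicalDeviceVulkan11Features enabled11;
  //   enabled11.setMultiview( VK_TRUE )
  //            .setShaderDrawParameters( VK_TRUE );
  //
  //   vk::DeviceCreateInfo deviceInfo;
  //   deviceInfo.setQueueCreateInfoCount( 1 )
  //             .setPQueueCreateInfos( &queueInfo )
  //             .setPNext( &enabled11 );            // feature struct rides on the pNext chain
  //
  //   vk::Device device = physicalDevice.createDevice( deviceInfo );
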
+  struct PhysicalDeviceVulkan11Properties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ ), subgroupSize( subgroupSize_ ), subgroupSupportedStages( subgroupSupportedStages_ ), subgroupSupportedOperations( subgroupSupportedOperations_ ), subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ), pointClippingBehavior( pointClippingBehavior_ ), maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ), protectedNoFault( protectedNoFault_ ), maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan11Properties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkan11Properties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVulkan11Properties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceUUID == rhs.deviceUUID )
+          && ( driverUUID == rhs.driverUUID )
+          && ( deviceLUID == rhs.deviceLUID )
+          && ( deviceNodeMask == rhs.deviceNodeMask )
+          && ( deviceLUIDValid == rhs.deviceLUIDValid )
+          && ( subgroupSize == rhs.subgroupSize )
+          && ( subgroupSupportedStages == rhs.subgroupSupportedStages )
+          && ( subgroupSupportedOperations == rhs.subgroupSupportedOperations )
+          && ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages )
+          && ( pointClippingBehavior == rhs.pointClippingBehavior )
+          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex )
+          && ( protectedNoFault == rhs.protectedNoFault )
+          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
+    uint32_t deviceNodeMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+    uint32_t subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+    uint32_t maxMultiviewViewCount = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
+  {
+    using Type = PhysicalDeviceVulkan11Properties;
+  };
+
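  // Illustrative sketch only: reading the Vulkan 1.1 property block above through
  // a StructureChain, which wires up the pNext chain automatically. Assumes
  // enhanced mode (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined) and a Vulkan 1.2
  // capable vk::PhysicalDevice named physicalDevice.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceVulkan11Properties>();
  //   auto & props11 = chain.get<vk::PhysicalDeviceVulkan11Properties>();
  //   uint32_t subgroupSize = props11.subgroupSize;   // typically 32 or 64 on desktop GPUs
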
+  struct PhysicalDeviceVulkan12Features
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}) VULKAN_HPP_NOEXCEPT
+    : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ), drawIndirectCount( drawIndirectCount_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ ), shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ), shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ ), descriptorIndexing( descriptorIndexing_ ), shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ ), samplerFilterMinmax( samplerFilterMinmax_ ), scalarBlockLayout( scalarBlockLayout_ ), imagelessFramebuffer( imagelessFramebuffer_ ), uniformBufferStandardLayout( uniformBufferStandardLayout_ ), shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ), separateDepthStencilLayouts( separateDepthStencilLayouts_ ), hostQueryReset( hostQueryReset_ ), timelineSemaphore( timelineSemaphore_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ), vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ), shaderOutputViewportIndex( shaderOutputViewportIndex_ ), shaderOutputLayer( shaderOutputLayer_ ), subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan12Features ) );
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectCount = drawIndirectCount_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16 = shaderFloat16_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt8 = shaderInt8_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorIndexing = descriptorIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerFilterMinmax = samplerFilterMinmax_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scalarBlockLayout = scalarBlockLayout_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagelessFramebuffer = imagelessFramebuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformBufferStandardLayout = uniformBufferStandardLayout_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateDepthStencilLayouts = separateDepthStencilLayouts_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timelineSemaphore = timelineSemaphore_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderOutputViewportIndex = shaderOutputViewportIndex_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderOutputLayer = shaderOutputLayer_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkan12Features const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVulkan12Features const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge )
+          && ( drawIndirectCount == rhs.drawIndirectCount )
+          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
+          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
+          && ( storagePushConstant8 == rhs.storagePushConstant8 )
+          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
+          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics )
+          && ( shaderFloat16 == rhs.shaderFloat16 )
+          && ( shaderInt8 == rhs.shaderInt8 )
+          && ( descriptorIndexing == rhs.descriptorIndexing )
+          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray )
+          && ( samplerFilterMinmax == rhs.samplerFilterMinmax )
+          && ( scalarBlockLayout == rhs.scalarBlockLayout )
+          && ( imagelessFramebuffer == rhs.imagelessFramebuffer )
+          && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout )
+          && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes )
+          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts )
+          && ( hostQueryReset == rhs.hostQueryReset )
+          && ( timelineSemaphore == rhs.timelineSemaphore )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice )
+          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
+          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
+          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains )
+          && ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex )
+          && ( shaderOutputLayer == rhs.shaderOutputLayer )
+          && ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
+    }
+
+    bool operator!=( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
+    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
+  {
+    using Type = PhysicalDeviceVulkan12Features;
+  };
+
+  struct PhysicalDeviceVulkan12Properties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}) VULKAN_HPP_NOEXCEPT
+    : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ ), denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ), maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ), supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ ), filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ), maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ), framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkan12Properties ) );
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkan12Properties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVulkan12Properties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( driverID == rhs.driverID )
+          && ( driverName == rhs.driverName )
+          && ( driverInfo == rhs.driverInfo )
+          && ( conformanceVersion == rhs.conformanceVersion )
+          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
+          && ( roundingModeIndependence == rhs.roundingModeIndependence )
+          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
+          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
+          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
+          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
+          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
+          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
+          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
+          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
+          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
+          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
+          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
+          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
+          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
+          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
+          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 )
+          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
+          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
+          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
+          && ( independentResolveNone == rhs.independentResolveNone )
+          && ( independentResolve == rhs.independentResolve )
+          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping )
+          && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference )
+          && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
+    }
+
+    bool operator!=( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+    uint32_t maxPerStageUpdateAfterBindResources = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+    uint64_t maxTimelineSemaphoreValueDifference = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
+  {
+    using Type = PhysicalDeviceVulkan12Properties;
+  };
+
+  struct PhysicalDeviceVulkanMemoryModelFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}) VULKAN_HPP_NOEXCEPT
+    : vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
+          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
+          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
+    }
+
+    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
+  {
+    using Type = PhysicalDeviceVulkanMemoryModelFeatures;
+  };
+  using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
+
+  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}) VULKAN_HPP_NOEXCEPT
+    : ycbcrImageArrays( ycbcrImageArrays_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) );
+      return *this;
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrImageArrays = ycbcrImageArrays_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
+    }
+
+    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
+  {
+    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
   };
 
   struct PipelineColorBlendAdvancedStateCreateInfoEXT
   {
-    PipelineColorBlendAdvancedStateCreateInfoEXT( Bool32 srcPremultiplied_ = 0,
-                                                  Bool32 dstPremultiplied_ = 0,
-                                                  BlendOverlapEXT blendOverlap_ = BlendOverlapEXT::eUncorrelated )
-      : srcPremultiplied( srcPremultiplied_ )
-      , dstPremultiplied( dstPremultiplied_ )
-      , blendOverlap( blendOverlap_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
 
-    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated) VULKAN_HPP_NOEXCEPT
+    : srcPremultiplied( srcPremultiplied_ ), dstPremultiplied( dstPremultiplied_ ), blendOverlap( blendOverlap_ )
+    {}
 
-    PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
       return *this;
     }
-    PipelineColorBlendAdvancedStateCreateInfoEXT& setPNext( const void* pNext_ )
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineColorBlendAdvancedStateCreateInfoEXT& setSrcPremultiplied( Bool32 srcPremultiplied_ )
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
     {
       srcPremultiplied = srcPremultiplied_;
       return *this;
     }
 
-    PipelineColorBlendAdvancedStateCreateInfoEXT& setDstPremultiplied( Bool32 dstPremultiplied_ )
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
     {
       dstPremultiplied = dstPremultiplied_;
       return *this;
     }
 
-    PipelineColorBlendAdvancedStateCreateInfoEXT& setBlendOverlap( BlendOverlapEXT blendOverlap_ )
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
     {
       blendOverlap = blendOverlap_;
       return *this;
     }
 
-    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const
+
+    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>(this);
+      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
     }
 
-    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &()
+    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>(this);
+      return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
     }
 
-    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -35802,102 +73130,215 @@
           && ( blendOverlap == rhs.blendOverlap );
     }
 
-    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const
+    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+
 
   public:
-    const void* pNext = nullptr;
-    Bool32 srcPremultiplied;
-    Bool32 dstPremultiplied;
-    BlendOverlapEXT blendOverlap;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
+    VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
+
   };
   static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
-  enum class CoverageModulationModeNV
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
   {
-    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
-    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
-    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
-    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+    using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
   };
 
-  struct PipelineCoverageModulationStateCreateInfoNV
+  struct PipelineCompilerControlCreateInfoAMD
   {
-    PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateFlagsNV flags_ = PipelineCoverageModulationStateCreateFlagsNV(),
-                                                 CoverageModulationModeNV coverageModulationMode_ = CoverageModulationModeNV::eNone,
-                                                 Bool32 coverageModulationTableEnable_ = 0,
-                                                 uint32_t coverageModulationTableCount_ = 0,
-                                                 const float* pCoverageModulationTable_ = nullptr )
-      : flags( flags_ )
-      , coverageModulationMode( coverageModulationMode_ )
-      , coverageModulationTableEnable( coverageModulationTableEnable_ )
-      , coverageModulationTableCount( coverageModulationTableCount_ )
-      , pCoverageModulationTable( pCoverageModulationTable_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
 
-    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : compilerControlFlags( compilerControlFlags_ )
+    {}
 
-    PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
       return *this;
     }
-    PipelineCoverageModulationStateCreateInfoNV& setPNext( const void* pNext_ )
+
+    PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCompilerControlCreateInfoAMD ) );
+      return *this;
+    }
+
+    PipelineCompilerControlCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineCoverageModulationStateCreateInfoNV& setFlags( PipelineCoverageModulationStateCreateFlagsNV flags_ )
+    PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compilerControlFlags = compilerControlFlags_;
+      return *this;
+    }
+
+
+    operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
+    }
+
+    operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCompilerControlCreateInfoAMD const& ) const = default;
+#else
+    bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compilerControlFlags == rhs.compilerControlFlags );
+    }
+
+    bool operator!=( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
+
+  };
+  static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
+  {
+    using Type = PipelineCompilerControlCreateInfoAMD;
+  };
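PipelineCompilerControlCreateInfoAMD carries nothing but its flags; like the other create-info wrappers it is meant to be hung off a pipeline create info through pNext. A sketch under the same default-namespace assumption:

    vk::PipelineCompilerControlCreateInfoAMD compilerControl;             // sType is already filled in
    vk::GraphicsPipelineCreateInfo pipelineInfo;
    pipelineInfo.setPNext( &compilerControl );                            // chain the AMD extension struct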
+
+  struct PipelineCoverageModulationStateCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float* pCoverageModulationTable_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( coverageModulationTableCount_ ), pCoverageModulationTable( pCoverageModulationTable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ )
+    : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) ), pCoverageModulationTable( coverageModulationTable_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationMode( CoverageModulationModeNV coverageModulationMode_ )
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
     {
       coverageModulationMode = coverageModulationMode_;
       return *this;
     }
 
-    PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableEnable( Bool32 coverageModulationTableEnable_ )
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       coverageModulationTableEnable = coverageModulationTableEnable_;
       return *this;
     }
 
-    PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ )
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
     {
       coverageModulationTableCount = coverageModulationTableCount_;
       return *this;
     }
 
-    PipelineCoverageModulationStateCreateInfoNV& setPCoverageModulationTable( const float* pCoverageModulationTable_ )
+    PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float* pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
     {
       pCoverageModulationTable = pCoverageModulationTable_;
       return *this;
     }
 
-    operator VkPipelineCoverageModulationStateCreateInfoNV const&() const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>(this);
+      coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
+      pCoverageModulationTable = coverageModulationTable_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineCoverageModulationStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
     }
 
-    operator VkPipelineCoverageModulationStateCreateInfoNV &()
+    operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>(this);
+      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
     }
 
-    bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -35908,322 +73349,706 @@
           && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
     }
 
-    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
+    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+
 
   public:
-    const void* pNext = nullptr;
-    PipelineCoverageModulationStateCreateFlagsNV flags;
-    CoverageModulationModeNV coverageModulationMode;
-    Bool32 coverageModulationTableEnable;
-    uint32_t coverageModulationTableCount;
-    const float* pCoverageModulationTable;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
+    VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
+    uint32_t coverageModulationTableCount = {};
+    const float* pCoverageModulationTable = {};
+
   };
   static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
 
-  enum class ValidationCacheHeaderVersionEXT
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
   {
-    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+    using Type = PipelineCoverageModulationStateCreateInfoNV;
   };
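The coverage-modulation wrapper shows the ArrayProxyNoTemporaries conveniences added in this version: in enhanced mode (the default), setCoverageModulationTable fills the count/pointer pair from one contiguous container and refuses temporaries so the stored pointer cannot dangle. A short sketch, assuming the default `vk` namespace:

    std::vector<float> modulationTable = { 1.0f, 0.75f, 0.5f, 0.25f };    // must be an lvalue
    vk::PipelineCoverageModulationStateCreateInfoNV coverage;
    coverage.setCoverageModulationMode( vk::CoverageModulationModeNV::eRgba )
            .setCoverageModulationTableEnable( VK_TRUE )
            .setCoverageModulationTable( modulationTable );               // sets count and pointer together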
 
-  enum class ShaderInfoTypeAMD
+  struct PipelineCoverageReductionStateCreateInfoNV
   {
-    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
-    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
-    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
-  };
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
 
-  enum class QueueGlobalPriorityEXT
-  {
-    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
-    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
-    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
-    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
-  };
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), coverageReductionMode( coverageReductionMode_ )
+    {}
 
-  struct DeviceQueueGlobalPriorityCreateInfoEXT
-  {
-    DeviceQueueGlobalPriorityCreateInfoEXT( QueueGlobalPriorityEXT globalPriority_ = QueueGlobalPriorityEXT::eLow )
-      : globalPriority( globalPriority_ )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs )
+    PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) );
-    }
-
-    DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
       return *this;
     }
-    DeviceQueueGlobalPriorityCreateInfoEXT& setPNext( const void* pNext_ )
+
+    PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCoverageReductionStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DeviceQueueGlobalPriorityCreateInfoEXT& setGlobalPriority( QueueGlobalPriorityEXT globalPriority_ )
-    {
-      globalPriority = globalPriority_;
-      return *this;
-    }
-
-    operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>(this);
-    }
-
-    operator VkDeviceQueueGlobalPriorityCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>(this);
-    }
-
-    bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( globalPriority == rhs.globalPriority );
-    }
-
-    bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    QueueGlobalPriorityEXT globalPriority;
-  };
-  static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class DebugUtilsMessageSeverityFlagBitsEXT
-  {
-    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
-    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
-    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
-    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
-  };
-
-  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT, VkDebugUtilsMessageSeverityFlagsEXT>;
-
-  VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 )
-  {
-    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits )
-  {
-    return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
-  }
-
-  template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
-  {
-    enum
-    {
-      allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError)
-    };
-  };
-
-  enum class DebugUtilsMessageTypeFlagBitsEXT
-  {
-    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
-    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
-    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
-  };
-
-  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT, VkDebugUtilsMessageTypeFlagsEXT>;
-
-  VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 )
-  {
-    return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits )
-  {
-    return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
-  }
-
-  template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
-  {
-    enum
-    {
-      allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
-    };
-  };
-
-  struct DebugUtilsMessengerCreateInfoEXT
-  {
-    DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT flags_ = DebugUtilsMessengerCreateFlagsEXT(),
-                                      DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = DebugUtilsMessageSeverityFlagsEXT(),
-                                      DebugUtilsMessageTypeFlagsEXT messageType_ = DebugUtilsMessageTypeFlagsEXT(),
-                                      PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr,
-                                      void* pUserData_ = nullptr )
-      : flags( flags_ )
-      , messageSeverity( messageSeverity_ )
-      , messageType( messageType_ )
-      , pfnUserCallback( pfnUserCallback_ )
-      , pUserData( pUserData_ )
-    {
-    }
-
-    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) );
-    }
-
-    DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) );
-      return *this;
-    }
-    DebugUtilsMessengerCreateInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugUtilsMessengerCreateInfoEXT& setFlags( DebugUtilsMessengerCreateFlagsEXT flags_ )
+    PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    DebugUtilsMessengerCreateInfoEXT& setMessageSeverity( DebugUtilsMessageSeverityFlagsEXT messageSeverity_ )
+    PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
     {
-      messageSeverity = messageSeverity_;
+      coverageReductionMode = coverageReductionMode_;
       return *this;
     }
 
-    DebugUtilsMessengerCreateInfoEXT& setMessageType( DebugUtilsMessageTypeFlagsEXT messageType_ )
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      messageType = messageType_;
-      return *this;
+      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
     }
 
-    DebugUtilsMessengerCreateInfoEXT& setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ )
+    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      pfnUserCallback = pfnUserCallback_;
-      return *this;
+      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
     }
 
-    DebugUtilsMessengerCreateInfoEXT& setPUserData( void* pUserData_ )
-    {
-      pUserData = pUserData_;
-      return *this;
-    }
 
-    operator VkDebugUtilsMessengerCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>(this);
-    }
-
-    operator VkDebugUtilsMessengerCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>(this);
-    }
-
-    bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( flags == rhs.flags )
-          && ( messageSeverity == rhs.messageSeverity )
-          && ( messageType == rhs.messageType )
-          && ( pfnUserCallback == rhs.pfnUserCallback )
-          && ( pUserData == rhs.pUserData );
+          && ( coverageReductionMode == rhs.coverageReductionMode );
     }
 
-    bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const
+    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+
 
   public:
-    const void* pNext = nullptr;
-    DebugUtilsMessengerCreateFlagsEXT flags;
-    DebugUtilsMessageSeverityFlagsEXT messageSeverity;
-    DebugUtilsMessageTypeFlagsEXT messageType;
-    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback;
-    void* pUserData;
-  };
-  static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
 
-  enum class ConservativeRasterizationModeEXT
+  };
+  static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
   {
-    eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
-    eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
-    eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
+    using Type = PipelineCoverageReductionStateCreateInfoNV;
   };
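The CppType specialization emitted after each struct gives a compile-time mapping from a StructureType enumerant back to the wrapper type. It can be queried directly, for example:

    using ReductionInfo = vk::CppType<vk::StructureType,
                                      vk::StructureType::ePipelineCoverageReductionStateCreateInfoNV>::Type;
    static_assert( std::is_same<ReductionInfo, vk::PipelineCoverageReductionStateCreateInfoNV>::value,
                   "CppType maps the enumerant to its wrapper" );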
 
-  struct PipelineRasterizationConservativeStateCreateInfoEXT
+  struct PipelineCoverageToColorStateCreateInfoNV
   {
-    PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = PipelineRasterizationConservativeStateCreateFlagsEXT(),
-                                                         ConservativeRasterizationModeEXT conservativeRasterizationMode_ = ConservativeRasterizationModeEXT::eDisabled,
-                                                         float extraPrimitiveOverestimationSize_ = 0 )
-      : flags( flags_ )
-      , conservativeRasterizationMode( conservativeRasterizationMode_ )
-      , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
 
-    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), coverageToColorEnable( coverageToColorEnable_ ), coverageToColorLocation( coverageToColorLocation_ )
+    {}
 
-    PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) );
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
       return *this;
     }
-    PipelineRasterizationConservativeStateCreateInfoEXT& setPNext( const void* pNext_ )
+
+    PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineRasterizationConservativeStateCreateInfoEXT& setFlags( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ )
+    PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    PipelineRasterizationConservativeStateCreateInfoEXT& setConservativeRasterizationMode( ConservativeRasterizationModeEXT conservativeRasterizationMode_ )
+    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorEnable = coverageToColorEnable_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorLocation = coverageToColorLocation_;
+      return *this;
+    }
+
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageToColorEnable == rhs.coverageToColorEnable )
+          && ( coverageToColorLocation == rhs.coverageToColorLocation );
+    }
+
+    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
+    uint32_t coverageToColorLocation = {};
+
+  };
+  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageToColorStateCreateInfoNV;
+  };
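Each wrapper keeps its implicit conversion operators to the C struct, so a populated vk:: struct can be handed to code that still speaks the plain C API without copying. For instance:

    vk::PipelineCoverageToColorStateCreateInfoNV toColor;
    toColor.setCoverageToColorEnable( VK_TRUE )
           .setCoverageToColorLocation( 0 );
    const VkPipelineCoverageToColorStateCreateInfoNV & cView = toColor;   // reinterprets in place, no copy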
+
+  struct PipelineCreationFeedbackEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), duration( duration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineCreationFeedbackEXT & operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCreationFeedbackEXT ) );
+      return *this;
+    }
+
+
+    operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackEXT*>( this );
+    }
+
+    operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCreationFeedbackEXT const& ) const = default;
+#else
+    bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( duration == rhs.duration );
+    }
+
+    bool operator!=( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {};
+    uint64_t duration = {};
+
+  };
+  static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCreationFeedbackCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {}) VULKAN_HPP_NOEXCEPT
+    : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ), pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const & pipelineStageCreationFeedbacks_ )
+    : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) ), pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineCreationFeedbackCreateInfoEXT & operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineCreationFeedbackCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineCreationFeedback = pPipelineCreationFeedback_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
+      pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback )
+          && ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount )
+          && ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
+    }
+
+    bool operator!=( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {};
+    uint32_t pipelineStageCreationFeedbackCount = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {};
+
+  };
+  static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCreationFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfoEXT>
+  {
+    using Type = PipelineCreationFeedbackCreateInfoEXT;
+  };
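PipelineCreationFeedbackEXT and PipelineCreationFeedbackCreateInfoEXT work as a pair: the create-info points at caller-owned feedback storage, and the driver fills in flags and a duration in nanoseconds during pipeline creation. A sketch of wiring up the storage (chaining the create-info into the pipeline's pNext chain is left to the caller), assuming the default `vk` namespace:

    vk::PipelineCreationFeedbackEXT overallFeedback;
    std::array<vk::PipelineCreationFeedbackEXT, 2> stageFeedback{};       // one entry per shader stage
    vk::PipelineCreationFeedbackCreateInfoEXT feedbackInfo;
    feedbackInfo.setPPipelineCreationFeedback( &overallFeedback )
                .setPipelineStageCreationFeedbacks( stageFeedback );      // count and pointer taken from the array
    // after pipeline creation, overallFeedback.duration holds the measured time in nanoseconds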
+
+  struct PipelineDiscardRectangleStateCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( discardRectangleCount_ ), pDiscardRectangles( pDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
+    : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) ), pDiscardRectangles( discardRectangles_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleMode = discardRectangleMode_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = discardRectangleCount_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDiscardRectangles = pDiscardRectangles_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
+      pDiscardRectangles = discardRectangles_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( discardRectangleMode == rhs.discardRectangleMode )
+          && ( discardRectangleCount == rhs.discardRectangleCount )
+          && ( pDiscardRectangles == rhs.pDiscardRectangles );
+    }
+
+    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
+    uint32_t discardRectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {};
+
+  };
+  static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
+  {
+    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
+  };
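In enhanced mode the generator also emits an extra constructor that takes an ArrayProxy in place of the count/pointer pair, mirroring the setDiscardRectangles setter above. A sketch, with the rectangle contents assumed to be filled in elsewhere:

    std::array<vk::Rect2D, 2> rectangles{};                               // placeholder contents
    vk::PipelineDiscardRectangleStateCreateInfoEXT discardState(
        {}, vk::DiscardRectangleModeEXT::eInclusive, rectangles );        // count and pointer deduced from the array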
+
+  struct PipelineFragmentShadingRateStateCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const& combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }) VULKAN_HPP_NOEXCEPT
+    : fragmentSize( fragmentSize_ ), combinerOps( combinerOps_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineFragmentShadingRateStateCreateInfoKHR ) );
+      return *this;
+    }
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentSize = fragmentSize_;
+      return *this;
+    }
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
+    {
+      combinerOps = combinerOps_;
+      return *this;
+    }
+
+
+    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
+    }
+
+    operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentSize == rhs.fragmentSize )
+          && ( combinerOps == rhs.combinerOps );
+    }
+
+    bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } };
+
+  };
+  static_assert( sizeof( PipelineFragmentShadingRateStateCreateInfoKHR ) == sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineFragmentShadingRateStateCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
+  {
+    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
+  };
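The fragment-shading-rate struct stores its two combiner operations in an ArrayWrapper1D, and the setter accepts a plain std::array of the two ops. A short sketch, again under the default `vk` namespace:

    vk::PipelineFragmentShadingRateStateCreateInfoKHR shadingRate;
    shadingRate.setFragmentSize( vk::Extent2D( 2, 2 ) )                   // request a 2x2 fragment size
               .setCombinerOps( { vk::FragmentShadingRateCombinerOpKHR::eKeep,
                                  vk::FragmentShadingRateCombinerOpKHR::eReplace } );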
+
+  struct PipelineRasterizationConservativeStateCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), conservativeRasterizationMode( conservativeRasterizationMode_ ), extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
     {
       conservativeRasterizationMode = conservativeRasterizationMode_;
       return *this;
     }
 
-    PipelineRasterizationConservativeStateCreateInfoEXT& setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ )
+    PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
     {
       extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
       return *this;
     }
 
-    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const
+
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>(this);
+      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
     }
 
-    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &()
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>(this);
+      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
     }
 
-    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
@@ -36232,578 +74057,1037 @@
           && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
     }
 
-    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const
+    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+
 
   public:
-    const void* pNext = nullptr;
-    PipelineRasterizationConservativeStateCreateFlagsEXT flags;
-    ConservativeRasterizationModeEXT conservativeRasterizationMode;
-    float extraPrimitiveOverestimationSize;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
+    float extraPrimitiveOverestimationSize = {};
+
   };
   static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
-  enum class DescriptorBindingFlagBitsEXT
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
   {
-    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
-    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT,
-    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT,
-    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT
+    using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
   };
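The comparison operators follow the new pattern throughout: where the compiler supports it, a defaulted operator<=> supplies all comparisons; otherwise member-wise operator== and operator!= are generated. Either way two wrappers compare directly:

    vk::PipelineRasterizationConservativeStateCreateInfoEXT a, b;
    b.setConservativeRasterizationMode( vk::ConservativeRasterizationModeEXT::eOverestimate )
     .setExtraPrimitiveOverestimationSize( 0.5f );
    bool changed = ( a != b );                                            // member-wise comparison; true here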
 
-  using DescriptorBindingFlagsEXT = Flags<DescriptorBindingFlagBitsEXT, VkDescriptorBindingFlagsEXT>;
-
-  VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 )
+  struct PipelineRasterizationDepthClipStateCreateInfoEXT
   {
-    return DescriptorBindingFlagsEXT( bit0 ) | bit1;
-  }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
 
-  VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits )
-  {
-    return ~( DescriptorBindingFlagsEXT( bits ) );
-  }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), depthClipEnable( depthClipEnable_ )
+    {}
 
-  template <> struct FlagTraits<DescriptorBindingFlagBitsEXT>
-  {
-    enum
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount)
-    };
-  };
-
-  struct DescriptorSetLayoutBindingFlagsCreateInfoEXT
-  {
-    DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0,
-                                                  const DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr )
-      : bindingCount( bindingCount_ )
-      , pBindingFlags( pBindingFlags_ )
-    {
+      *this = rhs;
     }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs )
+    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) );
-    }
-
-    DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
       return *this;
     }
-    DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPNext( const void* pNext_ )
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DescriptorSetLayoutBindingFlagsCreateInfoEXT& setBindingCount( uint32_t bindingCount_ )
-    {
-      bindingCount = bindingCount_;
-      return *this;
-    }
-
-    DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPBindingFlags( const DescriptorBindingFlagsEXT* pBindingFlags_ )
-    {
-      pBindingFlags = pBindingFlags_;
-      return *this;
-    }
-
-    operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>(this);
-    }
-
-    operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>(this);
-    }
-
-    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( bindingCount == rhs.bindingCount )
-          && ( pBindingFlags == rhs.pBindingFlags );
-    }
-
-    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t bindingCount;
-    const DescriptorBindingFlagsEXT* pBindingFlags;
-  };
-  static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" );
-
-  enum class VendorId
-  {
-    eViv = VK_VENDOR_ID_VIV,
-    eVsi = VK_VENDOR_ID_VSI,
-    eKazan = VK_VENDOR_ID_KAZAN
-  };
-
-  enum class DriverIdKHR
-  {
-    eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY_KHR,
-    eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR,
-    eMesaRadv = VK_DRIVER_ID_MESA_RADV_KHR,
-    eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR,
-    eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR,
-    eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR,
-    eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR,
-    eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR,
-    eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY_KHR
-  };
-
-  struct PhysicalDeviceDriverPropertiesKHR
-  {
-    operator VkPhysicalDeviceDriverPropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceDriverPropertiesKHR*>(this);
-    }
-
-    operator VkPhysicalDeviceDriverPropertiesKHR &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR*>(this);
-    }
-
-    bool operator==( PhysicalDeviceDriverPropertiesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( driverID == rhs.driverID )
-          && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ) == 0 )
-          && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ) == 0 )
-          && ( conformanceVersion == rhs.conformanceVersion );
-    }
-
-    bool operator!=( PhysicalDeviceDriverPropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceDriverPropertiesKHR;
-
-  public:
-    void* pNext = nullptr;
-    DriverIdKHR driverID;
-    char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR];
-    char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR];
-    ConformanceVersionKHR conformanceVersion;
-  };
-  static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "struct and wrapper have different size!" );
-
-  enum class ConditionalRenderingFlagBitsEXT
-  {
-    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
-  };
-
-  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT, VkConditionalRenderingFlagsEXT>;
-
-  VULKAN_HPP_INLINE ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 )
-  {
-    return ConditionalRenderingFlagsEXT( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits )
-  {
-    return ~( ConditionalRenderingFlagsEXT( bits ) );
-  }
-
-  template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
-  {
-    enum
-    {
-      allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted)
-    };
-  };
-
-  struct ConditionalRenderingBeginInfoEXT
-  {
-    ConditionalRenderingBeginInfoEXT( Buffer buffer_ = Buffer(),
-                                      DeviceSize offset_ = 0,
-                                      ConditionalRenderingFlagsEXT flags_ = ConditionalRenderingFlagsEXT() )
-      : buffer( buffer_ )
-      , offset( offset_ )
-      , flags( flags_ )
-    {
-    }
-
-    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ConditionalRenderingBeginInfoEXT ) );
-    }
-
-    ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ConditionalRenderingBeginInfoEXT ) );
-      return *this;
-    }
-    ConditionalRenderingBeginInfoEXT& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ConditionalRenderingBeginInfoEXT& setBuffer( Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    ConditionalRenderingBeginInfoEXT& setOffset( DeviceSize offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    ConditionalRenderingBeginInfoEXT& setFlags( ConditionalRenderingFlagsEXT flags_ )
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    operator VkConditionalRenderingBeginInfoEXT const&() const
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>(this);
+      depthClipEnable = depthClipEnable_;
+      return *this;
     }
 
-    operator VkConditionalRenderingBeginInfoEXT &()
+
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>(this);
+      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
     }
 
-    bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( buffer == rhs.buffer )
-          && ( offset == rhs.offset )
-          && ( flags == rhs.flags );
+          && ( flags == rhs.flags )
+          && ( depthClipEnable == rhs.depthClipEnable );
     }
 
-    bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const
+    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
+
 
   public:
-    const void* pNext = nullptr;
-    Buffer buffer;
-    DeviceSize offset;
-    ConditionalRenderingFlagsEXT flags;
-  };
-  static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
 
-  enum class ShadingRatePaletteEntryNV
+  };
+  static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
   {
-    eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
-    e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
-    e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
-    e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
-    e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
-    e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
-    e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
-    e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
-    e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
-    e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
-    e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
-    e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
+    using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
   };
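A brief sketch of how the conversion operators above are typically used: the wrapper can be handed to code that expects the plain C struct without copying (assumes the conventional `vk` namespace alias):

    // Sketch: construct via the new defaulted constructor, then rely on the
    // operator VkPipelineRasterizationDepthClipStateCreateInfoEXT&() conversion.
    vk::PipelineRasterizationDepthClipStateCreateInfoEXT depthClip( {}, VK_TRUE );
    const VkPipelineRasterizationDepthClipStateCreateInfoEXT & cStruct = depthClip;  // reinterpreted in place, no copy
    (void) cStruct;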
 
-  struct ShadingRatePaletteNV
+  struct PipelineRasterizationLineStateCreateInfoEXT
   {
-    ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = 0,
-                          const ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = nullptr )
-      : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
-      , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
-    {
-    }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
 
-    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( ShadingRatePaletteNV ) );
-    }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}) VULKAN_HPP_NOEXCEPT
+    : lineRasterizationMode( lineRasterizationMode_ ), stippledLineEnable( stippledLineEnable_ ), lineStippleFactor( lineStippleFactor_ ), lineStipplePattern( lineStipplePattern_ )
+    {}
 
-    ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( ShadingRatePaletteNV ) );
-      return *this;
+      *this = rhs;
     }
-    ShadingRatePaletteNV& setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ )
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs );
       return *this;
     }
 
-    ShadingRatePaletteNV& setPShadingRatePaletteEntries( const ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ )
+    PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationLineStateCreateInfoEXT ) );
       return *this;
     }
 
-    operator VkShadingRatePaletteNV const&() const
-    {
-      return *reinterpret_cast<const VkShadingRatePaletteNV*>(this);
-    }
-
-    operator VkShadingRatePaletteNV &()
-    {
-      return *reinterpret_cast<VkShadingRatePaletteNV*>(this);
-    }
-
-    bool operator==( ShadingRatePaletteNV const& rhs ) const
-    {
-      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
-          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
-    }
-
-    bool operator!=( ShadingRatePaletteNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    uint32_t shadingRatePaletteEntryCount;
-    const ShadingRatePaletteEntryNV* pShadingRatePaletteEntries;
-  };
-  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
-
-  struct PipelineViewportShadingRateImageStateCreateInfoNV
-  {
-    PipelineViewportShadingRateImageStateCreateInfoNV( Bool32 shadingRateImageEnable_ = 0,
-                                                       uint32_t viewportCount_ = 0,
-                                                       const ShadingRatePaletteNV* pShadingRatePalettes_ = nullptr )
-      : shadingRateImageEnable( shadingRateImageEnable_ )
-      , viewportCount( viewportCount_ )
-      , pShadingRatePalettes( pShadingRatePalettes_ )
-    {
-    }
-
-    PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) );
-    }
-
-    PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) );
-      return *this;
-    }
-    PipelineViewportShadingRateImageStateCreateInfoNV& setPNext( const void* pNext_ )
+    PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineViewportShadingRateImageStateCreateInfoNV& setShadingRateImageEnable( Bool32 shadingRateImageEnable_ )
+    PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
     {
-      shadingRateImageEnable = shadingRateImageEnable_;
+      lineRasterizationMode = lineRasterizationMode_;
       return *this;
     }
 
-    PipelineViewportShadingRateImageStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ )
+    PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
     {
-      viewportCount = viewportCount_;
+      stippledLineEnable = stippledLineEnable_;
       return *this;
     }
 
-    PipelineViewportShadingRateImageStateCreateInfoNV& setPShadingRatePalettes( const ShadingRatePaletteNV* pShadingRatePalettes_ )
+    PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
     {
-      pShadingRatePalettes = pShadingRatePalettes_;
+      lineStippleFactor = lineStippleFactor_;
       return *this;
     }
 
-    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const
+    PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>(this);
+      lineStipplePattern = lineStipplePattern_;
+      return *this;
     }
 
-    operator VkPipelineViewportShadingRateImageStateCreateInfoNV &()
+
+    operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>(this);
+      return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
     }
 
-    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const
+    operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( shadingRateImageEnable == rhs.shadingRateImageEnable )
-          && ( viewportCount == rhs.viewportCount )
-          && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
+          && ( lineRasterizationMode == rhs.lineRasterizationMode )
+          && ( stippledLineEnable == rhs.stippledLineEnable )
+          && ( lineStippleFactor == rhs.lineStippleFactor )
+          && ( lineStipplePattern == rhs.lineStipplePattern );
     }
 
-    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const
+    bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+
 
   public:
-    const void* pNext = nullptr;
-    Bool32 shadingRateImageEnable;
-    uint32_t viewportCount;
-    const ShadingRatePaletteNV* pShadingRatePalettes;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
+    VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
+    uint32_t lineStippleFactor = {};
+    uint16_t lineStipplePattern = {};
+
   };
-  static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
 
-  struct CoarseSampleOrderCustomNV
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfoEXT>
   {
-    CoarseSampleOrderCustomNV( ShadingRatePaletteEntryNV shadingRate_ = ShadingRatePaletteEntryNV::eNoInvocations,
-                               uint32_t sampleCount_ = 0,
-                               uint32_t sampleLocationCount_ = 0,
-                               const CoarseSampleLocationNV* pSampleLocations_ = nullptr )
-      : shadingRate( shadingRate_ )
-      , sampleCount( sampleCount_ )
-      , sampleLocationCount( sampleLocationCount_ )
-      , pSampleLocations( pSampleLocations_ )
-    {
-    }
+    using Type = PipelineRasterizationLineStateCreateInfoEXT;
+  };
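A short sketch of how the fluent setters on PipelineRasterizationLineStateCreateInfoEXT chain, and how the extension struct hooks into a rasterization state through pNext (assumes the `vk` alias and that VK_EXT_line_rasterization is enabled on the device):

    // Sketch only; each setter returns *this, so calls chain.
    vk::PipelineRasterizationLineStateCreateInfoEXT lineState;
    lineState.setLineRasterizationMode( vk::LineRasterizationModeEXT::eDefault )
             .setStippledLineEnable( VK_TRUE )
             .setLineStippleFactor( 2 )
             .setLineStipplePattern( 0x5555 );

    vk::PipelineRasterizationStateCreateInfo rasterizationState;
    rasterizationState.setPNext( &lineState );  // extension struct chained through pNext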
 
-    CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CoarseSampleOrderCustomNV ) );
-    }
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
 
-    CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict) VULKAN_HPP_NOEXCEPT
+    : rasterizationOrder( rasterizationOrder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( CoarseSampleOrderCustomNV ) );
-      return *this;
+      *this = rhs;
     }
-    CoarseSampleOrderCustomNV& setShadingRate( ShadingRatePaletteEntryNV shadingRate_ )
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      shadingRate = shadingRate_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
       return *this;
     }
 
-    CoarseSampleOrderCustomNV& setSampleCount( uint32_t sampleCount_ )
+    PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      sampleCount = sampleCount_;
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
       return *this;
     }
 
-    CoarseSampleOrderCustomNV& setSampleLocationCount( uint32_t sampleLocationCount_ )
+    PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      sampleLocationCount = sampleLocationCount_;
+      pNext = pNext_;
       return *this;
     }
 
-    CoarseSampleOrderCustomNV& setPSampleLocations( const CoarseSampleLocationNV* pSampleLocations_ )
+    PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
     {
-      pSampleLocations = pSampleLocations_;
+      rasterizationOrder = rasterizationOrder_;
       return *this;
     }
 
-    operator VkCoarseSampleOrderCustomNV const&() const
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>(this);
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
     }
 
-    operator VkCoarseSampleOrderCustomNV &()
+    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>(this);
+      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
     }
 
-    bool operator==( CoarseSampleOrderCustomNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( shadingRate == rhs.shadingRate )
-          && ( sampleCount == rhs.sampleCount )
-          && ( sampleLocationCount == rhs.sampleLocationCount )
-          && ( pSampleLocations == rhs.pSampleLocations );
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
     }
 
-    bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-    ShadingRatePaletteEntryNV shadingRate;
-    uint32_t sampleCount;
-    uint32_t sampleLocationCount;
-    const CoarseSampleLocationNV* pSampleLocations;
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
+
   };
-  static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
 
-  enum class CoarseSampleOrderTypeNV
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
   {
-    eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
-    eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV,
-    ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV,
-    eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV
+    using Type = PipelineRasterizationStateRasterizationOrderAMD;
+  };
+
+  struct PipelineRasterizationStateStreamCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), rasterizationStream( rasterizationStream_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationStream = rasterizationStream_;
+      return *this;
+    }
+
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rasterizationStream == rhs.rasterizationStream );
+    }
+
+    bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
+    uint32_t rasterizationStream = {};
+
+  };
+  static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationStateStreamCreateInfoEXT;
+  };
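The static structureType and allowDuplicate members added to each wrapper are what vk::StructureChain uses to validate and link pNext chains at compile time; a hedged sketch, assuming the `vk` alias and that this transform-feedback struct is registered as extending PipelineRasterizationStateCreateInfo in the Vulkan XML:

    // Sketch: a default-constructed StructureChain links the pNext pointers itself.
    vk::StructureChain< vk::PipelineRasterizationStateCreateInfo,
                        vk::PipelineRasterizationStateStreamCreateInfoEXT > chain;
    chain.get< vk::PipelineRasterizationStateStreamCreateInfoEXT >().rasterizationStream = 0;
    vk::PipelineRasterizationStateCreateInfo & rasterizationState =
        chain.get< vk::PipelineRasterizationStateCreateInfo >();  // head of the chain
    (void) rasterizationState;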
+
+  struct PipelineRepresentativeFragmentTestStateCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}) VULKAN_HPP_NOEXCEPT
+    : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTestEnable = representativeFragmentTestEnable_;
+      return *this;
+    }
+
+
+    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
+    }
+
+    bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
+
+  };
+  static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
+  {
+    using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
+  };
+
+  struct PipelineSampleLocationsStateCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : sampleLocationsEnable( sampleLocationsEnable_ ), sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsEnable = sampleLocationsEnable_;
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+
+
+    operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationsEnable == rhs.sampleLocationsEnable )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+    }
+
+    bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
+  };
+  static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
+  {
+    using Type = PipelineSampleLocationsStateCreateInfoEXT;
+  };
+
+  struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(uint32_t requiredSubgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : requiredSubgroupSize( requiredSubgroupSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) );
+      return *this;
+    }
+
+
+    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
+    }
+
+    bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+    void* pNext = {};
+    uint32_t requiredSubgroupSize = {};
+
+  };
+  static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>
+  {
+    using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+  };
+
+  struct PipelineTessellationDomainOriginStateCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft) VULKAN_HPP_NOEXCEPT
+    : domainOrigin( domainOrigin_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
+    {
+      domainOrigin = domainOrigin_;
+      return *this;
+    }
+
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( domainOrigin == rhs.domainOrigin );
+    }
+
+    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
+
+  };
+  static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
+  {
+    using Type = PipelineTessellationDomainOriginStateCreateInfo;
+  };
+  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
+  struct VertexInputBindingDivisorDescriptionEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
+    : binding( binding_ ), divisor( divisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+
+
+    operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VertexInputBindingDivisorDescriptionEXT const& ) const = default;
+#else
+    bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( divisor == rhs.divisor );
+    }
+
+    bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t binding = {};
+    uint32_t divisor = {};
+
+  };
+  static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineVertexInputDivisorStateCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {}) VULKAN_HPP_NOEXCEPT
+    : vertexBindingDivisorCount( vertexBindingDivisorCount_ ), pVertexBindingDivisors( pVertexBindingDivisors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineVertexInputDivisorStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
+    : vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) ), pVertexBindingDivisors( vertexBindingDivisors_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) );
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDivisorCount = vertexBindingDivisorCount_;
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDivisors = pVertexBindingDivisors_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
+      pVertexBindingDivisors = vertexBindingDivisors_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
+          && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
+    }
+
+    bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+    const void* pNext = {};
+    uint32_t vertexBindingDivisorCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {};
+
+  };
+  static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>
+  {
+    using Type = PipelineVertexInputDivisorStateCreateInfoEXT;
   };
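A small sketch of the new enhanced-mode constructor taking an ArrayProxyNoTemporaries, which derives vertexBindingDivisorCount and pVertexBindingDivisors from a named container (assumes VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined and the `vk` alias):

    // Sketch only; the array must be an lvalue, since ArrayProxyNoTemporaries
    // rejects temporaries whose storage would not outlive the create-info.
    std::array< vk::VertexInputBindingDivisorDescriptionEXT, 1 > divisors = { {
      vk::VertexInputBindingDivisorDescriptionEXT( /*binding*/ 0, /*divisor*/ 4 )
    } };
    vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState( divisors );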
 
   struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
   {
-    PipelineViewportCoarseSampleOrderStateCreateInfoNV( CoarseSampleOrderTypeNV sampleOrderType_ = CoarseSampleOrderTypeNV::eDefault,
-                                                        uint32_t customSampleOrderCount_ = 0,
-                                                        const CoarseSampleOrderCustomNV* pCustomSampleOrders_ = nullptr )
-      : sampleOrderType( sampleOrderType_ )
-      , customSampleOrderCount( customSampleOrderCount_ )
-      , pCustomSampleOrders( pCustomSampleOrders_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {}) VULKAN_HPP_NOEXCEPT
+    : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( customSampleOrderCount_ ), pCustomSampleOrders( pCustomSampleOrders_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ )
+    : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) ), pCustomSampleOrders( customSampleOrders_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs )
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
       return *this;
     }
-    PipelineViewportCoarseSampleOrderStateCreateInfoNV& setPNext( const void* pNext_ )
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineViewportCoarseSampleOrderStateCreateInfoNV& setSampleOrderType( CoarseSampleOrderTypeNV sampleOrderType_ )
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
     {
       sampleOrderType = sampleOrderType_;
       return *this;
     }
 
-    PipelineViewportCoarseSampleOrderStateCreateInfoNV& setCustomSampleOrderCount( uint32_t customSampleOrderCount_ )
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
     {
       customSampleOrderCount = customSampleOrderCount_;
       return *this;
     }
 
-    PipelineViewportCoarseSampleOrderStateCreateInfoNV& setPCustomSampleOrders( const CoarseSampleOrderCustomNV* pCustomSampleOrders_ )
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
     {
       pCustomSampleOrders = pCustomSampleOrders_;
       return *this;
     }
 
-    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>(this);
+      customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
+      pCustomSampleOrders = customSampleOrders_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
     }
 
-    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &()
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>(this);
+      return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
     }
 
-    bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( sampleOrderType == rhs.sampleOrderType )
           && ( customSampleOrderCount == rhs.customSampleOrderCount )
           && ( pCustomSampleOrders == rhs.pCustomSampleOrders );
     }
 
-    bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const
+    bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
 
   public:
-    const void* pNext = nullptr;
-    CoarseSampleOrderTypeNV sampleOrderType;
-    uint32_t customSampleOrderCount;
-    const CoarseSampleOrderCustomNV* pCustomSampleOrders;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
+    uint32_t customSampleOrderCount = {};
+    const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {};
+
   };
   static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
 
-  enum class GeometryInstanceFlagBitsNV
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
   {
-    eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
-    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV,
-    eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV,
-    eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV
+    using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
   };
 
-  using GeometryInstanceFlagsNV = Flags<GeometryInstanceFlagBitsNV, VkGeometryInstanceFlagsNV>;
-
-  VULKAN_HPP_INLINE GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 )
+  struct PipelineViewportExclusiveScissorStateCreateInfoNV
   {
-    return GeometryInstanceFlagsNV( bit0 ) | bit1;
-  }
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
 
-  VULKAN_HPP_INLINE GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits )
-  {
-    return ~( GeometryInstanceFlagsNV( bits ) );
-  }
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {}) VULKAN_HPP_NOEXCEPT
+    : exclusiveScissorCount( exclusiveScissorCount_ ), pExclusiveScissors( pExclusiveScissors_ )
+    {}
 
-  template <> struct FlagTraits<GeometryInstanceFlagBitsNV>
-  {
-    enum
+    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque)
-    };
-  };
-
-  enum class GeometryFlagBitsNV
-  {
-    eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV,
-    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV
-  };
-
-  using GeometryFlagsNV = Flags<GeometryFlagBitsNV, VkGeometryFlagsNV>;
-
-  VULKAN_HPP_INLINE GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 )
-  {
-    return GeometryFlagsNV( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE GeometryFlagsNV operator~( GeometryFlagBitsNV bits )
-  {
-    return ~( GeometryFlagsNV( bits ) );
-  }
-
-  template <> struct FlagTraits<GeometryFlagBitsNV>
-  {
-    enum
-    {
-      allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation)
-    };
-  };
-
-  enum class BuildAccelerationStructureFlagBitsNV
-  {
-    eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV,
-    eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV,
-    ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV,
-    ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV,
-    eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV
-  };
-
-  using BuildAccelerationStructureFlagsNV = Flags<BuildAccelerationStructureFlagBitsNV, VkBuildAccelerationStructureFlagsNV>;
-
-  VULKAN_HPP_INLINE BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 )
-  {
-    return BuildAccelerationStructureFlagsNV( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits )
-  {
-    return ~( BuildAccelerationStructureFlagsNV( bits ) );
-  }
-
-  template <> struct FlagTraits<BuildAccelerationStructureFlagBitsNV>
-  {
-    enum
-    {
-      allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory)
-    };
-  };
-
-  enum class CopyAccelerationStructureModeNV
-  {
-    eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV,
-    eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV
-  };
-
-  enum class AccelerationStructureTypeNV
-  {
-    eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV,
-    eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV
-  };
-
-  enum class GeometryTypeNV
-  {
-    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV,
-    eAabbs = VK_GEOMETRY_TYPE_AABBS_NV
-  };
-
-  struct GeometryNV
-  {
-    GeometryNV( GeometryTypeNV geometryType_ = GeometryTypeNV::eTriangles,
-                GeometryDataNV geometry_ = GeometryDataNV(),
-                GeometryFlagsNV flags_ = GeometryFlagsNV() )
-      : geometryType( geometryType_ )
-      , geometry( geometry_ )
-      , flags( flags_ )
-    {
+      *this = rhs;
     }
 
-    GeometryNV( VkGeometryNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( GeometryNV ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
+    : exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    GeometryNV& operator=( VkGeometryNV const & rhs )
+    PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( GeometryNV ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
       return *this;
     }
-    GeometryNV& setPNext( const void* pNext_ )
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    GeometryNV& setGeometryType( GeometryTypeNV geometryType_ )
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      geometryType = geometryType_;
+      exclusiveScissorCount = exclusiveScissorCount_;
       return *this;
     }
 
-    GeometryNV& setGeometry( GeometryDataNV geometry_ )
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
     {
-      geometry = geometry_;
+      pExclusiveScissors = pExclusiveScissors_;
       return *this;
     }
 
-    GeometryNV& setFlags( GeometryFlagsNV flags_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
+      pExclusiveScissors = exclusiveScissors_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissorCount == rhs.exclusiveScissorCount )
+          && ( pExclusiveScissors == rhs.pExclusiveScissors );
+    }
+
+    bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+    const void* pNext = {};
+    uint32_t exclusiveScissorCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {};
+
+  };
+  static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
+  {
+    using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
+  };
+
+  struct PipelineViewportShadingRateImageStateCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {}) VULKAN_HPP_NOEXCEPT
+    : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( viewportCount_ ), pShadingRatePalettes( pShadingRatePalettes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ )
+    : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) ), pShadingRatePalettes( shadingRatePalettes_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImageEnable = shadingRateImageEnable_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePalettes = pShadingRatePalettes_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = static_cast<uint32_t>( shadingRatePalettes_.size() );
+      pShadingRatePalettes = shadingRatePalettes_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateImageEnable == rhs.shadingRateImageEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
+    }
+
+    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {};
+
+  };
+  static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
+  {
+    using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
+  };
+
+  struct ViewportSwizzleNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT
+    : x( x_ ), y( y_ ), z( z_ ), w( w_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
+      return *this;
+    }
+
+    ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ViewportSwizzleNV ) );
+      return *this;
+    }
+
+    ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
+    {
+      w = w_;
+      return *this;
+    }
+
+
+    operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
+    }
+
+    operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportSwizzleNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ViewportSwizzleNV const& ) const = default;
+#else
+    bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z )
+          && ( w == rhs.w );
+    }
+
+    bool operator!=( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+
+  };
+  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportSwizzleStateCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), viewportCount( viewportCount_ ), pViewportSwizzles( pViewportSwizzles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ )
+    : flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    operator VkGeometryNV const&() const
+    PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryNV*>(this);
-    }
-
-    operator VkGeometryNV &()
-    {
-      return *reinterpret_cast<VkGeometryNV*>(this);
-    }
-
-    bool operator==( GeometryNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( geometryType == rhs.geometryType )
-          && ( geometry == rhs.geometry )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( GeometryNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eGeometryNV;
-
-  public:
-    const void* pNext = nullptr;
-    GeometryTypeNV geometryType;
-    GeometryDataNV geometry;
-    GeometryFlagsNV flags;
-  };
-  static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
-
-  struct AccelerationStructureInfoNV
-  {
-    AccelerationStructureInfoNV( AccelerationStructureTypeNV type_ = AccelerationStructureTypeNV::eTopLevel,
-                                 BuildAccelerationStructureFlagsNV flags_ = BuildAccelerationStructureFlagsNV(),
-                                 uint32_t instanceCount_ = 0,
-                                 uint32_t geometryCount_ = 0,
-                                 const GeometryNV* pGeometries_ = nullptr )
-      : type( type_ )
-      , flags( flags_ )
-      , instanceCount( instanceCount_ )
-      , geometryCount( geometryCount_ )
-      , pGeometries( pGeometries_ )
-    {
-    }
-
-    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AccelerationStructureInfoNV ) );
-    }
-
-    AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AccelerationStructureInfoNV ) );
-      return *this;
-    }
-    AccelerationStructureInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
+      viewportCount = viewportCount_;
       return *this;
     }
 
-    AccelerationStructureInfoNV& setType( AccelerationStructureTypeNV type_ )
+    PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
     {
-      type = type_;
+      pViewportSwizzles = pViewportSwizzles_;
       return *this;
     }
 
-    AccelerationStructureInfoNV& setFlags( BuildAccelerationStructureFlagsNV flags_ )
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      viewportCount = static_cast<uint32_t>( viewportSwizzles_.size() );
+      pViewportSwizzles = viewportSwizzles_.data();
       return *this;
     }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
 
-    AccelerationStructureInfoNV& setInstanceCount( uint32_t instanceCount_ )
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
     {
-      instanceCount = instanceCount_;
-      return *this;
+      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
     }
 
-    AccelerationStructureInfoNV& setGeometryCount( uint32_t geometryCount_ )
+    operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      geometryCount = geometryCount_;
-      return *this;
+      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
     }
 
-    AccelerationStructureInfoNV& setPGeometries( const GeometryNV* pGeometries_ )
-    {
-      pGeometries = pGeometries_;
-      return *this;
-    }
 
-    operator VkAccelerationStructureInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkAccelerationStructureInfoNV*>(this);
-    }
-
-    operator VkAccelerationStructureInfoNV &()
-    {
-      return *reinterpret_cast<VkAccelerationStructureInfoNV*>(this);
-    }
-
-    bool operator==( AccelerationStructureInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( instanceCount == rhs.instanceCount )
-          && ( geometryCount == rhs.geometryCount )
-          && ( pGeometries == rhs.pGeometries );
-    }
-
-    bool operator!=( AccelerationStructureInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAccelerationStructureInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    AccelerationStructureTypeNV type;
-    BuildAccelerationStructureFlagsNV flags;
-    uint32_t instanceCount;
-    uint32_t geometryCount;
-    const GeometryNV* pGeometries;
-  };
-  static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
-
-  struct AccelerationStructureCreateInfoNV
-  {
-    AccelerationStructureCreateInfoNV( DeviceSize compactedSize_ = 0,
-                                       AccelerationStructureInfoNV info_ = AccelerationStructureInfoNV() )
-      : compactedSize( compactedSize_ )
-      , info( info_ )
-    {
-    }
-
-    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AccelerationStructureCreateInfoNV ) );
-    }
-
-    AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AccelerationStructureCreateInfoNV ) );
-      return *this;
-    }
-    AccelerationStructureCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    AccelerationStructureCreateInfoNV& setCompactedSize( DeviceSize compactedSize_ )
-    {
-      compactedSize = compactedSize_;
-      return *this;
-    }
-
-    AccelerationStructureCreateInfoNV& setInfo( AccelerationStructureInfoNV info_ )
-    {
-      info = info_;
-      return *this;
-    }
-
-    operator VkAccelerationStructureCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>(this);
-    }
-
-    operator VkAccelerationStructureCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>(this);
-    }
-
-    bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( compactedSize == rhs.compactedSize )
-          && ( info == rhs.info );
-    }
-
-    bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    DeviceSize compactedSize;
-    AccelerationStructureInfoNV info;
-  };
-  static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
-
-  enum class AccelerationStructureMemoryRequirementsTypeNV
-  {
-    eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
-    eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
-    eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
-  };
-
-  struct AccelerationStructureMemoryRequirementsInfoNV
-  {
-    AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsTypeNV type_ = AccelerationStructureMemoryRequirementsTypeNV::eObject,
-                                                   AccelerationStructureNV accelerationStructure_ = AccelerationStructureNV() )
-      : type( type_ )
-      , accelerationStructure( accelerationStructure_ )
-    {
-    }
-
-    AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) );
-    }
-
-    AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) );
-      return *this;
-    }
-    AccelerationStructureMemoryRequirementsInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    AccelerationStructureMemoryRequirementsInfoNV& setType( AccelerationStructureMemoryRequirementsTypeNV type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    AccelerationStructureMemoryRequirementsInfoNV& setAccelerationStructure( AccelerationStructureNV accelerationStructure_ )
-    {
-      accelerationStructure = accelerationStructure_;
-      return *this;
-    }
-
-    operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>(this);
-    }
-
-    operator VkAccelerationStructureMemoryRequirementsInfoNV &()
-    {
-      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>(this);
-    }
-
-    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( type == rhs.type )
-          && ( accelerationStructure == rhs.accelerationStructure );
-    }
-
-    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    AccelerationStructureMemoryRequirementsTypeNV type;
-    AccelerationStructureNV accelerationStructure;
-  };
-  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
-
-  enum class RayTracingShaderGroupTypeNV
-  {
-    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV,
-    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV,
-    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV
-  };
-
-  struct RayTracingShaderGroupCreateInfoNV
-  {
-    RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupTypeNV type_ = RayTracingShaderGroupTypeNV::eGeneral,
-                                       uint32_t generalShader_ = 0,
-                                       uint32_t closestHitShader_ = 0,
-                                       uint32_t anyHitShader_ = 0,
-                                       uint32_t intersectionShader_ = 0 )
-      : type( type_ )
-      , generalShader( generalShader_ )
-      , closestHitShader( closestHitShader_ )
-      , anyHitShader( anyHitShader_ )
-      , intersectionShader( intersectionShader_ )
-    {
-    }
-
-    RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RayTracingShaderGroupCreateInfoNV ) );
-    }
-
-    RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RayTracingShaderGroupCreateInfoNV ) );
-      return *this;
-    }
-    RayTracingShaderGroupCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RayTracingShaderGroupCreateInfoNV& setType( RayTracingShaderGroupTypeNV type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    RayTracingShaderGroupCreateInfoNV& setGeneralShader( uint32_t generalShader_ )
-    {
-      generalShader = generalShader_;
-      return *this;
-    }
-
-    RayTracingShaderGroupCreateInfoNV& setClosestHitShader( uint32_t closestHitShader_ )
-    {
-      closestHitShader = closestHitShader_;
-      return *this;
-    }
-
-    RayTracingShaderGroupCreateInfoNV& setAnyHitShader( uint32_t anyHitShader_ )
-    {
-      anyHitShader = anyHitShader_;
-      return *this;
-    }
-
-    RayTracingShaderGroupCreateInfoNV& setIntersectionShader( uint32_t intersectionShader_ )
-    {
-      intersectionShader = intersectionShader_;
-      return *this;
-    }
-
-    operator VkRayTracingShaderGroupCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>(this);
-    }
-
-    operator VkRayTracingShaderGroupCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>(this);
-    }
-
-    bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( type == rhs.type )
-          && ( generalShader == rhs.generalShader )
-          && ( closestHitShader == rhs.closestHitShader )
-          && ( anyHitShader == rhs.anyHitShader )
-          && ( intersectionShader == rhs.intersectionShader );
-    }
-
-    bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
-
-  public:
-    const void* pNext = nullptr;
-    RayTracingShaderGroupTypeNV type;
-    uint32_t generalShader;
-    uint32_t closestHitShader;
-    uint32_t anyHitShader;
-    uint32_t intersectionShader;
-  };
-  static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
-
-  struct RayTracingPipelineCreateInfoNV
-  {
-    RayTracingPipelineCreateInfoNV( PipelineCreateFlags flags_ = PipelineCreateFlags(),
-                                    uint32_t stageCount_ = 0,
-                                    const PipelineShaderStageCreateInfo* pStages_ = nullptr,
-                                    uint32_t groupCount_ = 0,
-                                    const RayTracingShaderGroupCreateInfoNV* pGroups_ = nullptr,
-                                    uint32_t maxRecursionDepth_ = 0,
-                                    PipelineLayout layout_ = PipelineLayout(),
-                                    Pipeline basePipelineHandle_ = Pipeline(),
-                                    int32_t basePipelineIndex_ = 0 )
-      : flags( flags_ )
-      , stageCount( stageCount_ )
-      , pStages( pStages_ )
-      , groupCount( groupCount_ )
-      , pGroups( pGroups_ )
-      , maxRecursionDepth( maxRecursionDepth_ )
-      , layout( layout_ )
-      , basePipelineHandle( basePipelineHandle_ )
-      , basePipelineIndex( basePipelineIndex_ )
-    {
-    }
-
-    RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RayTracingPipelineCreateInfoNV ) );
-    }
-
-    RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( RayTracingPipelineCreateInfoNV ) );
-      return *this;
-    }
-    RayTracingPipelineCreateInfoNV& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setFlags( PipelineCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setStageCount( uint32_t stageCount_ )
-    {
-      stageCount = stageCount_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setPStages( const PipelineShaderStageCreateInfo* pStages_ )
-    {
-      pStages = pStages_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setGroupCount( uint32_t groupCount_ )
-    {
-      groupCount = groupCount_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setPGroups( const RayTracingShaderGroupCreateInfoNV* pGroups_ )
-    {
-      pGroups = pGroups_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setMaxRecursionDepth( uint32_t maxRecursionDepth_ )
-    {
-      maxRecursionDepth = maxRecursionDepth_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setLayout( PipelineLayout layout_ )
-    {
-      layout = layout_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setBasePipelineHandle( Pipeline basePipelineHandle_ )
-    {
-      basePipelineHandle = basePipelineHandle_;
-      return *this;
-    }
-
-    RayTracingPipelineCreateInfoNV& setBasePipelineIndex( int32_t basePipelineIndex_ )
-    {
-      basePipelineIndex = basePipelineIndex_;
-      return *this;
-    }
-
-    operator VkRayTracingPipelineCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>(this);
-    }
-
-    operator VkRayTracingPipelineCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>(this);
-    }
-
-    bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
           && ( flags == rhs.flags )
-          && ( stageCount == rhs.stageCount )
-          && ( pStages == rhs.pStages )
-          && ( groupCount == rhs.groupCount )
-          && ( pGroups == rhs.pGroups )
-          && ( maxRecursionDepth == rhs.maxRecursionDepth )
-          && ( layout == rhs.layout )
-          && ( basePipelineHandle == rhs.basePipelineHandle )
-          && ( basePipelineIndex == rhs.basePipelineIndex );
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportSwizzles == rhs.pViewportSwizzles );
     }
 
-    bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const
+    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+
 
   public:
-    const void* pNext = nullptr;
-    PipelineCreateFlags flags;
-    uint32_t stageCount;
-    const PipelineShaderStageCreateInfo* pStages;
-    uint32_t groupCount;
-    const RayTracingShaderGroupCreateInfoNV* pGroups;
-    uint32_t maxRecursionDepth;
-    PipelineLayout layout;
-    Pipeline basePipelineHandle;
-    int32_t basePipelineIndex;
-  };
-  static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {};
 
-  enum class MemoryOverallocationBehaviorAMD
+  };
+  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
   {
-    eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
-    eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
-    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
+    using Type = PipelineViewportSwizzleStateCreateInfoNV;
   };
 
-  struct DeviceMemoryOverallocationCreateInfoAMD
+  struct PipelineViewportWScalingStateCreateInfoNV
   {
-    DeviceMemoryOverallocationCreateInfoAMD( MemoryOverallocationBehaviorAMD overallocationBehavior_ = MemoryOverallocationBehaviorAMD::eDefault )
-      : overallocationBehavior( overallocationBehavior_ )
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {}) VULKAN_HPP_NOEXCEPT
+    : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( viewportCount_ ), pViewportWScalings( pViewportWScalings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = rhs;
     }
 
-    DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) );
-    }
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ )
+    : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) ), pViewportWScalings( viewportWScalings_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
 
-    DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs )
+    PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
       return *this;
     }
-    DeviceMemoryOverallocationCreateInfoAMD& setPNext( const void* pNext_ )
+
+    PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    DeviceMemoryOverallocationCreateInfoAMD& setOverallocationBehavior( MemoryOverallocationBehaviorAMD overallocationBehavior_ )
+    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
     {
-      overallocationBehavior = overallocationBehavior_;
+      viewportWScalingEnable = viewportWScalingEnable_;
       return *this;
     }
 
-    operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const
+    PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>(this);
+      viewportCount = viewportCount_;
+      return *this;
     }
 
-    operator VkDeviceMemoryOverallocationCreateInfoAMD &()
+    PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>(this);
+      pViewportWScalings = pViewportWScalings_;
+      return *this;
     }
 
-    bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = static_cast<uint32_t>( viewportWScalings_.size() );
+      pViewportWScalings = viewportWScalings_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( sType == rhs.sType )
           && ( pNext == rhs.pNext )
-          && ( overallocationBehavior == rhs.overallocationBehavior );
+          && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportWScalings == rhs.pViewportWScalings );
     }
 
-    bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const
+    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
+#endif
 
-  private:
-    StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+
 
   public:
-    const void* pNext = nullptr;
-    MemoryOverallocationBehaviorAMD overallocationBehavior;
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {};
+
   };
-  static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
 
-  template<typename Dispatch = DispatchLoaderStatic>
-  Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = Dispatch() );
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch = DispatchLoaderStatic>
-  ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d = Dispatch() );
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d)
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
   {
-    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d )
-  {
-    uint32_t apiVersion;
-    Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
-    return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    using Type = PipelineViewportWScalingStateCreateInfoNV;
+  };
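+
+  // Editorial usage sketch (not part of the upstream generated header): with VK_NV_clip_space_w_scaling
+  // enabled, PipelineViewportWScalingStateCreateInfoNV is chained into
+  // PipelineViewportStateCreateInfo::pNext when creating a graphics pipeline; the default "vk"
+  // namespace and a single viewport are assumed here.
+  //
+  //   vk::ViewportWScalingNV wScaling( 1.0f, 1.0f );
+  //   vk::PipelineViewportWScalingStateCreateInfoNV wScalingState( VK_TRUE, 1, &wScaling );
+  //   vk::PipelineViewportStateCreateInfo viewportState;
+  //   viewportState.setViewportCount( 1 ).setScissorCount( 1 );
+  //   viewportState.setPNext( &wScalingState );  // viewportCount above must match viewportState.viewportCount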
 
-
-  template<typename Dispatch = DispatchLoaderStatic>
-  Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() );
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = DispatchLoaderStatic> 
-  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d = Dispatch() );
-  template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = DispatchLoaderStatic> 
-  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d );
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d)
+#ifdef VK_USE_PLATFORM_GGP
+  struct PresentFrameTokenGGP
   {
-    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d )
-  {
-    std::vector<LayerProperties,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}) VULKAN_HPP_NOEXCEPT
+    : frameToken( frameToken_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d )
-  {
-    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
+      return *this;
+    }
+
+    PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentFrameTokenGGP ) );
+      return *this;
+    }
+
+    PresentFrameTokenGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameToken = frameToken_;
+      return *this;
+    }
+
+
+    operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
+    }
+
+    operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentFrameTokenGGP const& ) const = default;
+#else
+    bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
+    }
+
+    bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
+    const void* pNext = {};
+    GgpFrameToken frameToken = {};
+
+  };
+  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
+  {
+    using Type = PresentFrameTokenGGP;
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  struct RectLayerKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT
+    : offset( offset_ ), extent( extent_ ), layer( layer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    explicit RectLayerKHR( Rect2D const& rect2D, uint32_t layer_ = {} )
+      : offset( rect2D.offset )
+      , extent( rect2D.extent )
+      , layer( layer_ )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
+      return *this;
+    }
+
+    RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RectLayerKHR ) );
+      return *this;
+    }
+
+    RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layer = layer_;
+      return *this;
+    }
+
+
+    operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRectLayerKHR*>( this );
+    }
+
+    operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRectLayerKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RectLayerKHR const& ) const = default;
+#else
+    bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( layer == rhs.layer );
+    }
+
+    bool operator!=( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+    uint32_t layer = {};
+
+  };
+  static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PresentRegionKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {}) VULKAN_HPP_NOEXCEPT
+    : rectangleCount( rectangleCount_ ), pRectangles( pRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
+    : rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
+      return *this;
+    }
+
+    PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentRegionKHR ) );
+      return *this;
+    }
+
+    PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangleCount = rectangleCount_;
+      return *this;
+    }
+
+    PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRectangles = pRectangles_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentRegionKHR & setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangleCount = static_cast<uint32_t>( rectangles_.size() );
+      pRectangles = rectangles_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionKHR*>( this );
+    }
+
+    operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentRegionKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentRegionKHR const& ) const = default;
+#else
+    bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( rectangleCount == rhs.rectangleCount )
+          && ( pRectangles == rhs.pRectangles );
+    }
+
+    bool operator!=( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t rectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {};
+
+  };
+  static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PresentRegionsKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
+    : swapchainCount( swapchainCount_ ), pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
+    : swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
+      return *this;
+    }
+
+    PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentRegionsKHR ) );
+      return *this;
+    }
+
+    PresentRegionsKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentRegionsKHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( regions_.size() );
+      pRegions = regions_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
+    }
+
+    operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentRegionsKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentRegionsKHR const& ) const = default;
+#else
+    bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
+    const void* pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {};
+
+  };
+  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePresentRegionsKHR>
+  {
+    using Type = PresentRegionsKHR;
+  };
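+
+  // Editorial usage sketch (not part of the upstream generated header): with
+  // VK_KHR_incremental_present enabled, PresentRegionsKHR is chained into
+  // vk::PresentInfoKHR::pNext so the presentation engine knows which RectLayerKHR regions of each
+  // swapchain image actually changed; "presentInfo" and "presentQueue" are assumed to exist.
+  //
+  //   vk::RectLayerKHR      dirtyRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ), 0 );
+  //   vk::PresentRegionKHR  region( 1, &dirtyRect );
+  //   vk::PresentRegionsKHR regions( 1, &region );  // swapchainCount must match presentInfo.swapchainCount
+  //   presentInfo.setPNext( &regions );
+  //   presentQueue.presentKHR( presentInfo );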
+
+  struct PresentTimeGOOGLE
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT
+    : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentTimeGOOGLE ) );
+      return *this;
+    }
+
+    PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentID = presentID_;
+      return *this;
+    }
+
+    PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
+    {
+      desiredPresentTime = desiredPresentTime_;
+      return *this;
+    }
+
+
+    operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
+    }
+
+    operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentTimeGOOGLE const& ) const = default;
+#else
+    bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime );
+    }
+
+    bool operator!=( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t presentID = {};
+    uint64_t desiredPresentTime = {};
+
+  };
+  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
+  struct PresentTimesInfoGOOGLE
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {}) VULKAN_HPP_NOEXCEPT
+    : swapchainCount( swapchainCount_ ), pTimes( pTimes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
+    : swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( PresentTimesInfoGOOGLE ) );
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTimes = pTimes_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    PresentTimesInfoGOOGLE & setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( times_.size() );
+      pTimes = times_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+    operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentTimesInfoGOOGLE const& ) const = default;
+#else
+    bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pTimes == rhs.pTimes );
+    }
+
+    bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
+    const void* pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {};
+
+  };
+  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
+  {
+    using Type = PresentTimesInfoGOOGLE;
+  };
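+
+  // Editorial usage sketch (not part of the upstream generated header): with VK_GOOGLE_display_timing
+  // enabled, PresentTimesInfoGOOGLE is chained into vk::PresentInfoKHR::pNext, one PresentTimeGOOGLE
+  // per swapchain; presentID is an application-chosen value echoed back by
+  // getPastPresentationTimingGOOGLE, and desiredPresentTime is in nanoseconds (0 means no constraint).
+  //
+  //   vk::PresentTimeGOOGLE      presentTime( frameCounter, desiredPresentTimeNs );
+  //   vk::PresentTimesInfoGOOGLE timesInfo( 1, &presentTime );
+  //   presentInfo.setPNext( &timesInfo );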
+
+  struct ProtectedSubmitInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}) VULKAN_HPP_NOEXCEPT
+    : protectedSubmit( protectedSubmit_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+    ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ProtectedSubmitInfo ) );
+      return *this;
+    }
+
+    ProtectedSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedSubmit = protectedSubmit_;
+      return *this;
+    }
+
+
+    operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
+    }
+
+    operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ProtectedSubmitInfo const& ) const = default;
+#else
+    bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedSubmit == rhs.protectedSubmit );
+    }
+
+    bool operator!=( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
+
+  };
+  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
+  {
+    using Type = ProtectedSubmitInfo;
+  };
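+
+  // Editorial usage sketch (not part of the upstream generated header): ProtectedSubmitInfo is chained
+  // into vk::SubmitInfo::pNext to mark a batch as a protected submission (Vulkan 1.1 protected memory);
+  // the queue and command buffer are assumed to come from protected-capable objects.
+  //
+  //   vk::ProtectedSubmitInfo protectedInfo( VK_TRUE );
+  //   vk::SubmitInfo submitInfo;
+  //   submitInfo.setCommandBufferCount( 1 ).setPCommandBuffers( &protectedCommandBuffer );
+  //   submitInfo.setPNext( &protectedInfo );
+  //   queue.submit( submitInfo, fence );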
+
+  struct QueryPoolPerformanceQueryCreateInfoINTEL
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual) VULKAN_HPP_NOEXCEPT
+    : performanceCountersSampling( performanceCountersSampling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+    QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) );
+      return *this;
+    }
+
+    QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCountersSampling = performanceCountersSampling_;
+      return *this;
+    }
+
+
+    operator VkQueryPoolPerformanceQueryCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
+    }
+
+    operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const& ) const = default;
+#else
+    bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( performanceCountersSampling == rhs.performanceCountersSampling );
+    }
+
+    bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
+
+  };
+  static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolPerformanceQueryCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
+  {
+    using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
+  };
+  using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
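+
+  // Editorial usage sketch (not part of the upstream generated header): for VK_INTEL_performance_query,
+  // this struct is chained into vk::QueryPoolCreateInfo::pNext together with
+  // vk::QueryType::ePerformanceQueryINTEL (the performance API must have been initialized first via
+  // initializePerformanceApiINTEL); "device" is assumed to exist.
+  //
+  //   vk::QueryPoolPerformanceQueryCreateInfoINTEL intelInfo( vk::QueryPoolSamplingModeINTEL::eManual );
+  //   vk::QueryPoolCreateInfo poolInfo( {}, vk::QueryType::ePerformanceQueryINTEL, 1 );
+  //   poolInfo.setPNext( &intelInfo );
+  //   vk::QueryPool queryPool = device.createQueryPool( poolInfo );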
+
+  struct QueueFamilyCheckpointPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT
+    : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( QueueFamilyCheckpointPropertiesNV ) );
+      return *this;
+    }
+
+
+    operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyCheckpointPropertiesNV const& ) const = default;
+#else
+    bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+    }
+
+    bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
+
+  };
+  static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
+  {
+    using Type = QueueFamilyCheckpointPropertiesNV;
+  };
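+
+  // Editorial usage sketch (not part of the upstream generated header): QueueFamilyCheckpointPropertiesNV
+  // is an output structure for VK_NV_device_diagnostic_checkpoints; it is chained into
+  // vk::QueueFamilyProperties2::pNext before querying queue family properties, after which
+  // checkpointExecutionStageMask reports the stages that support checkpoint markers.
+  //
+  //   vk::QueueFamilyCheckpointPropertiesNV checkpointProps;
+  //   vk::QueueFamilyProperties2            props2;
+  //   props2.setPNext( &checkpointProps );
+  //   uint32_t count = 1;
+  //   physicalDevice.getQueueFamilyProperties2( &count, &props2 );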
+
+  struct RenderPassAttachmentBeginInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}) VULKAN_HPP_NOEXCEPT
+    : attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
+    : attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassAttachmentBeginInfo ) );
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassAttachmentBeginInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments = attachments_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkRenderPassAttachmentBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
+    }
+
+    operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassAttachmentBeginInfo const& ) const = default;
+#else
+    bool operator==( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments );
+    }
+
+    bool operator!=( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
+    const void* pNext = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
+
+  };
+  static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
+  {
+    using Type = RenderPassAttachmentBeginInfo;
+  };
+  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
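+
+  // Editorial usage sketch (not part of the upstream generated header): when a framebuffer is created
+  // with vk::FramebufferCreateFlagBits::eImageless, the actual image views are supplied at
+  // begin-render-pass time by chaining RenderPassAttachmentBeginInfo into
+  // vk::RenderPassBeginInfo::pNext; "renderPass", "imagelessFramebuffer", "renderArea", "clearValue",
+  // "colorImageView" and "commandBuffer" are assumed to exist.
+  //
+  //   vk::RenderPassAttachmentBeginInfo attachmentInfo( 1, &colorImageView );
+  //   vk::RenderPassBeginInfo beginInfo( renderPass, imagelessFramebuffer, renderArea, 1, &clearValue );
+  //   beginInfo.setPNext( &attachmentInfo );
+  //   commandBuffer.beginRenderPass( beginInfo, vk::SubpassContents::eInline );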
+
+  struct RenderPassFragmentDensityMapCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}) VULKAN_HPP_NOEXCEPT
+    : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) );
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
+      return *this;
+    }
+
+
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
+    }
+
+    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
+
+  };
+  static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
+  {
+    using Type = RenderPassFragmentDensityMapCreateInfoEXT;
+  };
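+
+  // Editorial usage sketch (not part of the upstream generated header): for VK_EXT_fragment_density_map,
+  // this struct is chained into vk::RenderPassCreateInfo::pNext; the AttachmentReference selects which
+  // render pass attachment holds the density map (here attachment 1, in the density-map-optimal layout).
+  // "renderPassCreateInfo" is assumed to exist.
+  //
+  //   vk::RenderPassFragmentDensityMapCreateInfoEXT densityMapInfo(
+  //     vk::AttachmentReference( 1, vk::ImageLayout::eFragmentDensityMapOptimalEXT ) );
+  //   renderPassCreateInfo.setPNext( &densityMapInfo );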
+
+  struct RenderPassInputAttachmentAspectCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {}) VULKAN_HPP_NOEXCEPT
+    : aspectReferenceCount( aspectReferenceCount_ ), pAspectReferences( pAspectReferences_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ )
+    : aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) );
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectReferenceCount = aspectReferenceCount_;
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAspectReferences = pAspectReferences_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
+      pAspectReferences = aspectReferences_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const& ) const = default;
+#else
+    bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( aspectReferenceCount == rhs.aspectReferenceCount )
+          && ( pAspectReferences == rhs.pAspectReferences );
+    }
+
+    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+    const void* pNext = {};
+    uint32_t aspectReferenceCount = {};
+    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {};
+
+  };
+  static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
+  {
+    using Type = RenderPassInputAttachmentAspectCreateInfo;
+  };
+  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
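+
+  // Editorial usage sketch (not part of the upstream generated header): RenderPassInputAttachmentAspectCreateInfo
+  // is chained into vk::RenderPassCreateInfo::pNext; each InputAttachmentAspectReference names a
+  // (subpass, input attachment index, aspect mask) triple restricting which aspects may be read.
+  // "renderPassCreateInfo" is assumed to exist.
+  //
+  //   vk::InputAttachmentAspectReference aspectRef( 0, 0, vk::ImageAspectFlagBits::eDepth );
+  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
+  //   renderPassCreateInfo.setPNext( &aspectInfo );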
+
+  struct RenderPassMultiviewCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t* pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t* pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t* pCorrelationMasks_ = {}) VULKAN_HPP_NOEXCEPT
+    : subpassCount( subpassCount_ ), pViewMasks( pViewMasks_ ), dependencyCount( dependencyCount_ ), pViewOffsets( pViewOffsets_ ), correlationMaskCount( correlationMaskCount_ ), pCorrelationMasks( pCorrelationMasks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {} )
+    : subpassCount( static_cast<uint32_t>( viewMasks_.size() ) ), pViewMasks( viewMasks_.data() ), dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) ), pViewOffsets( viewOffsets_.data() ), correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) ), pCorrelationMasks( correlationMasks_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassMultiviewCreateInfo ) );
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t* pViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewMasks = pViewMasks_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( viewMasks_.size() );
+      pViewMasks = viewMasks_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t* pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewOffsets = pViewOffsets_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
+      pViewOffsets = viewOffsets_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlationMaskCount = correlationMaskCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelationMasks = pCorrelationMasks_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassMultiviewCreateInfo & setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
+      pCorrelationMasks = correlationMasks_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+    operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassMultiviewCreateInfo const& ) const = default;
+#else
+    bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pViewMasks == rhs.pViewMasks )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pViewOffsets == rhs.pViewOffsets )
+          && ( correlationMaskCount == rhs.correlationMaskCount )
+          && ( pCorrelationMasks == rhs.pCorrelationMasks );
+    }
+
+    bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
+    const void* pNext = {};
+    uint32_t subpassCount = {};
+    const uint32_t* pViewMasks = {};
+    uint32_t dependencyCount = {};
+    const int32_t* pViewOffsets = {};
+    uint32_t correlationMaskCount = {};
+    const uint32_t* pCorrelationMasks = {};
+
+  };
+  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
+  {
+    using Type = RenderPassMultiviewCreateInfo;
+  };
+  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
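+
+  // Editorial usage sketch (not part of the upstream generated header): RenderPassMultiviewCreateInfo is
+  // chained into vk::RenderPassCreateInfo::pNext; pViewMasks holds one bitmask per subpass (bit i set
+  // means view i is rendered), pViewOffsets holds one entry per dependency, and pCorrelationMasks lists
+  // sets of views that may be rendered concurrently. "renderPassCreateInfo" is assumed to exist.
+  //
+  //   uint32_t viewMask        = 0x3;  // subpass 0 renders views 0 and 1
+  //   uint32_t correlationMask = 0x3;
+  //   vk::RenderPassMultiviewCreateInfo multiviewInfo( 1, &viewMask, 0, nullptr, 1, &correlationMask );
+  //   renderPassCreateInfo.setPNext( &multiviewInfo );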
+
+  struct SubpassSampleLocationsEXT
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
+    : subpassIndex( subpassIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassSampleLocationsEXT ) );
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassIndex = subpassIndex_;
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+
+
+    operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
+    }
+
+    operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassSampleLocationsEXT const& ) const = default;
+#else
+    bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpassIndex == rhs.subpassIndex )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+    }
+
+    bool operator!=( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t subpassIndex = {};
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+
+  };
+  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassSampleLocationsBeginInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
+    : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ), pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ), postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ), pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {} )
+    : attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) ), pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ), postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) ), pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) );
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
+      pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
+      pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const& ) const = default;
+#else
+    bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
+          && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
+          && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
+          && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
+    }
+
+    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+    const void* pNext = {};
+    uint32_t attachmentInitialSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {};
+    uint32_t postSubpassSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {};
+
+  };
+  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
+  {
+    using Type = RenderPassSampleLocationsBeginInfoEXT;
+  };
+
+  struct RenderPassTransformBeginInfoQCOM
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
+    : transform( transform_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( RenderPassTransformBeginInfoQCOM ) );
+      return *this;
+    }
+
+    RenderPassTransformBeginInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+
+
+    operator VkRenderPassTransformBeginInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
+    }
+
+    operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassTransformBeginInfoQCOM const& ) const = default;
+#else
+    bool operator==( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transform == rhs.transform );
+    }
+
+    bool operator!=( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+
+  };
+  static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
+  {
+    using Type = RenderPassTransformBeginInfoQCOM;
+  };
+
+  struct SamplerCustomBorderColorCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
+    : customBorderColor( customBorderColor_ ), format( format_ )
+    {}
+
+    SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerCustomBorderColorCreateInfoEXT ) );
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColor = customBorderColor_;
+      return *this;
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+
+    operator VkSamplerCustomBorderColorCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
+    }
+
+    operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
+    }
+
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+
+  };
+  static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerCustomBorderColorCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
+  {
+    using Type = SamplerCustomBorderColorCreateInfoEXT;
+  };
+
+  struct SamplerReductionModeCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage) VULKAN_HPP_NOEXCEPT
+    : reductionMode( reductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerReductionModeCreateInfo ) );
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reductionMode = reductionMode_;
+      return *this;
+    }
+
+
+    operator VkSamplerReductionModeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
+    }
+
+    operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerReductionModeCreateInfo const& ) const = default;
+#else
+    bool operator==( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( reductionMode == rhs.reductionMode );
+    }
+
+    bool operator!=( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
+
+  };
+  static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
+  {
+    using Type = SamplerReductionModeCreateInfo;
+  };
+  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
+
+  struct SamplerYcbcrConversionImageFormatProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
+    : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerYcbcrConversionImageFormatProperties ) );
+      return *this;
+    }
+
+
+    operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversionImageFormatProperties const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+    }
+
+    bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+    void* pNext = {};
+    uint32_t combinedImageSamplerDescriptorCount = {};
+
+  };
+  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
+  {
+    using Type = SamplerYcbcrConversionImageFormatProperties;
+  };
+  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+
+  struct SamplerYcbcrConversionInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}) VULKAN_HPP_NOEXCEPT
+    : conversion( conversion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SamplerYcbcrConversionInfo ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conversion = conversion_;
+      return *this;
+    }
+
+
+    operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversionInfo const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conversion == rhs.conversion );
+    }
+
+    bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
+
+  };
+  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
+  {
+    using Type = SamplerYcbcrConversionInfo;
+  };
+  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
+
+  struct SemaphoreTypeCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}) VULKAN_HPP_NOEXCEPT
+    : semaphoreType( semaphoreType_ ), initialValue( initialValue_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SemaphoreTypeCreateInfo ) );
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreType = semaphoreType_;
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialValue = initialValue_;
+      return *this;
+    }
+
+
+    operator VkSemaphoreTypeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
+    }
+
+    operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SemaphoreTypeCreateInfo const& ) const = default;
+#else
+    bool operator==( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphoreType == rhs.semaphoreType )
+          && ( initialValue == rhs.initialValue );
+    }
+
+    bool operator!=( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
+    uint64_t initialValue = {};
+
+  };
+  static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
+  {
+    using Type = SemaphoreTypeCreateInfo;
+  };
+  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
+
+  struct SetStateFlagsIndirectCommandNV
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT
+    : data( data_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SetStateFlagsIndirectCommandNV ) );
+      return *this;
+    }
+
+    SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+
+    operator VkSetStateFlagsIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
+    }
+
+    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SetStateFlagsIndirectCommandNV const& ) const = default;
+#else
+    bool operator==( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( data == rhs.data );
+    }
+
+    bool operator!=( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t data = {};
+
+  };
+  static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShaderModuleValidationCacheCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}) VULKAN_HPP_NOEXCEPT
+    : validationCache( validationCache_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) );
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
+    {
+      validationCache = validationCache_;
+      return *this;
+    }
+
+
+    operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( validationCache == rhs.validationCache );
+    }
+
+    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
+
+  };
+  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
+  {
+    using Type = ShaderModuleValidationCacheCreateInfoEXT;
+  };
+
+  struct ShaderResourceUsageAMD
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT
+    : numUsedVgprs( numUsedVgprs_ ), numUsedSgprs( numUsedSgprs_ ), ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ), ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ), scratchMemUsageInBytes( scratchMemUsageInBytes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
+      return *this;
+    }
+
+    ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderResourceUsageAMD ) );
+      return *this;
+    }
+
+
+    operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
+    }
+
+    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderResourceUsageAMD const& ) const = default;
+#else
+    bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( numUsedVgprs == rhs.numUsedVgprs )
+          && ( numUsedSgprs == rhs.numUsedSgprs )
+          && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
+          && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
+          && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
+    }
+
+    bool operator!=( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t numUsedVgprs = {};
+    uint32_t numUsedSgprs = {};
+    uint32_t ldsSizePerLocalWorkGroup = {};
+    size_t ldsUsageSizeInBytes = {};
+    size_t scratchMemUsageInBytes = {};
+
+  };
+  static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShaderStatisticsInfoAMD
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array<uint32_t,3> const& computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT
+    : shaderStageMask( shaderStageMask_ ), resourceUsage( resourceUsage_ ), numPhysicalVgprs( numPhysicalVgprs_ ), numPhysicalSgprs( numPhysicalSgprs_ ), numAvailableVgprs( numAvailableVgprs_ ), numAvailableSgprs( numAvailableSgprs_ ), computeWorkGroupSize( computeWorkGroupSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+    ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ShaderStatisticsInfoAMD ) );
+      return *this;
+    }
+
+
+    operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
+    }
+
+    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default;
+#else
+    bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shaderStageMask == rhs.shaderStageMask )
+          && ( resourceUsage == rhs.resourceUsage )
+          && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
+          && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
+          && ( numAvailableVgprs == rhs.numAvailableVgprs )
+          && ( numAvailableSgprs == rhs.numAvailableSgprs )
+          && ( computeWorkGroupSize == rhs.computeWorkGroupSize );
+    }
+
+    bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
+    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
+    uint32_t numPhysicalVgprs = {};
+    uint32_t numPhysicalSgprs = {};
+    uint32_t numAvailableVgprs = {};
+    uint32_t numAvailableSgprs = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
+
+  };
+  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct SharedPresentSurfaceCapabilitiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
+    : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SharedPresentSurfaceCapabilitiesKHR ) );
+      return *this;
+    }
+
+
+    operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
+    }
+
+    operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
+    }
+
+    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
+
+  };
+  static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
+  {
+    using Type = SharedPresentSurfaceCapabilitiesKHR;
+  };
+
+#ifdef VK_USE_PLATFORM_GGP
+  struct StreamDescriptorSurfaceCreateInfoGGP
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), streamDescriptor( streamDescriptor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( StreamDescriptorSurfaceCreateInfoGGP ) );
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamDescriptor = streamDescriptor_;
+      return *this;
+    }
+
+
+    operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
+    }
+
+    operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const& ) const = default;
+#else
+    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
+    }
+
+    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
+    GgpStreamDescriptor streamDescriptor = {};
+
+  };
+  static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
+  {
+    using Type = StreamDescriptorSurfaceCreateInfoGGP;
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  struct SubpassDescriptionDepthStencilResolve
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {}) VULKAN_HPP_NOEXCEPT
+    : depthResolveMode( depthResolveMode_ ), stencilResolveMode( stencilResolveMode_ ), pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SubpassDescriptionDepthStencilResolve ) );
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolve & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthResolveMode = depthResolveMode_;
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilResolveMode = stencilResolveMode_;
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
+      return *this;
+    }
+
+
+    operator VkSubpassDescriptionDepthStencilResolve const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
+    }
+
+    operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassDescriptionDepthStencilResolve const& ) const = default;
+#else
+    bool operator==( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthResolveMode == rhs.depthResolveMode )
+          && ( stencilResolveMode == rhs.stencilResolveMode )
+          && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
+    }
+
+    bool operator!=( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment = {};
+
+  };
+  static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
+  {
+    using Type = SubpassDescriptionDepthStencilResolve;
+  };
+  using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceCapabilitiesFullScreenExclusiveEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}) VULKAN_HPP_NOEXCEPT
+    : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) );
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
+      return *this;
+    }
+
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
+    }
+
+    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
+
+  };
+  static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
+  {
+    using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceFullScreenExclusiveInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault) VULKAN_HPP_NOEXCEPT
+    : fullScreenExclusive( fullScreenExclusive_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFullScreenExclusiveInfoEXT ) );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusive = fullScreenExclusive_;
+      return *this;
+    }
+
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const& ) const = default;
+#else
+    bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusive == rhs.fullScreenExclusive );
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
+
+  };
+  static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
+  {
+    using Type = SurfaceFullScreenExclusiveInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceFullScreenExclusiveWin32InfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}) VULKAN_HPP_NOEXCEPT
+    : hmonitor( hmonitor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hmonitor = hmonitor_;
+      return *this;
+    }
+
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const& ) const = default;
+#else
+    bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hmonitor == rhs.hmonitor );
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+    const void* pNext = {};
+    HMONITOR hmonitor = {};
+
+  };
+  static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
+  {
+    using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
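+
+  // Usage sketch (illustrative comment, not generated code; assumed names flagged inline):
+  // the two full-screen-exclusive structs above are normally chained ahead of a
+  // vk::SwapchainCreateInfoKHR before the swapchain is created.
+  //
+  //   vk::SurfaceFullScreenExclusiveWin32InfoEXT win32Info{ monitor };  // monitor: HMONITOR, assumed to be supplied by the caller
+  //   vk::SurfaceFullScreenExclusiveInfoEXT fullScreenInfo{ vk::FullScreenExclusiveEXT::eApplicationControlled };
+  //   fullScreenInfo.setPNext( &win32Info );
+  //   swapchainCreateInfo.setPNext( &fullScreenInfo );  // swapchainCreateInfo: vk::SwapchainCreateInfoKHR, assumed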
+
+  struct SurfaceProtectedCapabilitiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}) VULKAN_HPP_NOEXCEPT
+    : supportsProtected( supportsProtected_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SurfaceProtectedCapabilitiesKHR ) );
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supportsProtected = supportsProtected_;
+      return *this;
+    }
+
+
+    operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceProtectedCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsProtected == rhs.supportsProtected );
+    }
+
+    bool operator!=( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
+
+  };
+  static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
+  {
+    using Type = SurfaceProtectedCapabilitiesKHR;
+  };
+
+  struct SwapchainCounterCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
+    : surfaceCounters( surfaceCounters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surfaceCounters = surfaceCounters_;
+      return *this;
+    }
+
+
+    operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+    operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainCounterCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCounters == rhs.surfaceCounters );
+    }
+
+    bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
+
+  };
+  static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
+  {
+    using Type = SwapchainCounterCreateInfoEXT;
+  };
+
+  struct SwapchainDisplayNativeHdrCreateInfoAMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}) VULKAN_HPP_NOEXCEPT
+    : localDimmingEnable( localDimmingEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) );
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      localDimmingEnable = localDimmingEnable_;
+      return *this;
+    }
+
+
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const& ) const = default;
+#else
+    bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingEnable == rhs.localDimmingEnable );
+    }
+
+    bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
+
+  };
+  static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
+  {
+    using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
+  };
+
+  struct TextureLODGatherFormatPropertiesAMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}) VULKAN_HPP_NOEXCEPT
+    : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
+      return *this;
+    }
+
+    TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( TextureLODGatherFormatPropertiesAMD ) );
+      return *this;
+    }
+
+
+    operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( TextureLODGatherFormatPropertiesAMD const& ) const = default;
+#else
+    bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+    }
+
+    bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
+
+  };
+  static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
+  {
+    using Type = TextureLODGatherFormatPropertiesAMD;
+  };
+
+  struct TimelineSemaphoreSubmitInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
+    : waitSemaphoreValueCount( waitSemaphoreValueCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValueCount( signalSemaphoreValueCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
+    : waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( TimelineSemaphoreSubmitInfo ) );
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValueCount = waitSemaphoreValueCount_;
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+      pWaitSemaphoreValues = waitSemaphoreValues_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValueCount = signalSemaphoreValueCount_;
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+      pSignalSemaphoreValues = signalSemaphoreValues_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkTimelineSemaphoreSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
+    }
+
+    operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( TimelineSemaphoreSubmitInfo const& ) const = default;
+#else
+    bool operator==( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+    }
+
+    bool operator!=( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
+    const void* pNext = {};
+    uint32_t waitSemaphoreValueCount = {};
+    const uint64_t* pWaitSemaphoreValues = {};
+    uint32_t signalSemaphoreValueCount = {};
+    const uint64_t* pSignalSemaphoreValues = {};
+
+  };
+  static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
+  {
+    using Type = TimelineSemaphoreSubmitInfo;
+  };
+  using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
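+
+  // Usage sketch (illustrative comment, not generated code): with enhanced mode enabled,
+  // the ArrayProxy constructor above fills both count/pointer pairs in one call; the
+  // vk::SubmitInfo variable `submitInfo` and the semaphore values are assumptions.
+  //
+  //   uint64_t waitValues[]   = { 1 };
+  //   uint64_t signalValues[] = { 2 };
+  //   vk::TimelineSemaphoreSubmitInfo timelineInfo( waitValues, signalValues );
+  //   submitInfo.setPNext( &timelineInfo );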
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  struct TraceRaysIndirectCommandKHR
+  {
+
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
+    : width( width_ ), height( height_ ), depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    explicit TraceRaysIndirectCommandKHR( Extent2D const& extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width )
+      , height( extent2D.height )
+      , depth( depth_ )
+    {}
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( TraceRaysIndirectCommandKHR ) );
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+
+    operator VkTraceRaysIndirectCommandKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
+    }
+
+    operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( TraceRaysIndirectCommandKHR const& ) const = default;
+#else
+    bool operator==( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+    }
+
+    bool operator!=( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+
+  };
+  static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct ValidationFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {}) VULKAN_HPP_NOEXCEPT
+    : enabledValidationFeatureCount( enabledValidationFeatureCount_ ), pEnabledValidationFeatures( pEnabledValidationFeatures_ ), disabledValidationFeatureCount( disabledValidationFeatureCount_ ), pDisabledValidationFeatures( pDisabledValidationFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {} )
+    : enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) ), pEnabledValidationFeatures( enabledValidationFeatures_.data() ), disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) ), pDisabledValidationFeatures( disabledValidationFeatures_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ValidationFeaturesEXT ) );
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = enabledValidationFeatureCount_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledValidationFeatures = pEnabledValidationFeatures_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ValidationFeaturesEXT & setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
+      pEnabledValidationFeatures = enabledValidationFeatures_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = disabledValidationFeatureCount_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationFeatures = pDisabledValidationFeatures_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ValidationFeaturesEXT & setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
+      pDisabledValidationFeatures = disabledValidationFeatures_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
+    }
+
+    operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationFeaturesEXT const& ) const = default;
+#else
+    bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount )
+          && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures )
+          && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount )
+          && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
+    }
+
+    bool operator!=( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
+    const void* pNext = {};
+    uint32_t enabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {};
+    uint32_t disabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {};
+
+  };
+  static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
+  {
+    using Type = ValidationFeaturesEXT;
+  };
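+
+  // Usage sketch (illustrative comment, not generated code): ValidationFeaturesEXT is
+  // chained into vk::InstanceCreateInfo::pNext at instance creation; the eBestPractices
+  // enable value and the `instanceCreateInfo` variable are assumptions for illustration.
+  //
+  //   vk::ValidationFeatureEnableEXT enables[] = { vk::ValidationFeatureEnableEXT::eBestPractices };
+  //   vk::ValidationFeaturesEXT validationFeatures( enables );
+  //   instanceCreateInfo.setPNext( &validationFeatures );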
+
+  struct ValidationFlagsEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {}) VULKAN_HPP_NOEXCEPT
+    : disabledValidationCheckCount( disabledValidationCheckCount_ ), pDisabledValidationChecks( pDisabledValidationChecks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ )
+    : disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) ), pDisabledValidationChecks( disabledValidationChecks_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
+      return *this;
+    }
+
+    ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ValidationFlagsEXT ) );
+      return *this;
+    }
+
+    ValidationFlagsEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationCheckCount = disabledValidationCheckCount_;
+      return *this;
+    }
+
+    ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationChecks = pDisabledValidationChecks_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    ValidationFlagsEXT & setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
+      pDisabledValidationChecks = disabledValidationChecks_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
+    }
+
+    operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFlagsEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationFlagsEXT const& ) const = default;
+#else
+    bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+    }
+
+    bool operator!=( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
+    const void* pNext = {};
+    uint32_t disabledValidationCheckCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {};
+
+  };
+  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eValidationFlagsEXT>
+  {
+    using Type = ValidationFlagsEXT;
+  };
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  struct ViSurfaceCreateInfoNN
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void* window_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>( &rhs );
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( ViSurfaceCreateInfoNN ) );
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setWindow( void* window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+
+    operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
+    }
+
+    operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ViSurfaceCreateInfoNN const& ) const = default;
+#else
+    bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
+    void* window = {};
+
+  };
+  static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
+  {
+    using Type = ViSurfaceCreateInfoNN;
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  struct WaylandSurfaceCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display* display_ = {}, struct wl_surface* surface_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), display( display_ ), surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display* display_ ) VULKAN_HPP_NOEXCEPT
+    {
+      display = display_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface* surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+
+    operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WaylandSurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( display == rhs.display )
+          && ( surface == rhs.surface );
+    }
+
+    bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
+    struct wl_display* display = {};
+    struct wl_surface* surface = {};
+
+  };
+  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
+  {
+    using Type = WaylandSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32KeyedMutexAcquireReleaseInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeouts_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
+    : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeouts( pAcquireTimeouts_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
+    : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeouts( acquireTimeouts_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
+#else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
       {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
       }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
-  }
+      if ( acquireSyncs_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeouts = pAcquireTimeouts_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
+      pAcquireTimeouts = acquireTimeouts_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const& ) const = default;
+#else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+    const void* pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
+    const uint64_t* pAcquireKeys = {};
+    const uint32_t* pAcquireTimeouts = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
+    const uint64_t* pReleaseKeys = {};
+
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
+  {
+    using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
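+
+  // Usage note (illustrative comment, not generated code): the enhanced-mode constructor
+  // above throws vk::LogicError (or asserts when VULKAN_HPP_NO_EXCEPTIONS is defined) if
+  // the acquire arrays differ in length, so matched-size arrays are expected; `memory`
+  // is an assumed vk::DeviceMemory handle.
+  //
+  //   vk::DeviceMemory acquireSyncs[]    = { memory };
+  //   uint64_t         acquireKeys[]     = { 1 };
+  //   uint32_t         acquireTimeouts[] = { 1000 };
+  //   vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutexInfo( acquireSyncs, acquireKeys, acquireTimeouts );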
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32KeyedMutexAcquireReleaseInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeoutMilliseconds_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
+    : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
+    : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
+#else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
+      }
+      if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
+      pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const& ) const = default;
+#else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+    const void* pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
+    const uint64_t* pAcquireKeys = {};
+    const uint32_t* pAcquireTimeoutMilliseconds = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
+    const uint64_t* pReleaseKeys = {};
+
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
+  {
+    using Type = Win32KeyedMutexAcquireReleaseInfoNV;
+  };
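+
+  // Illustrative sketch (not part of the generated header): the ArrayProxyNoTemporaries constructor
+  // above derives acquireCount/releaseCount from the array arguments and checks that the three
+  // acquire arrays have matching lengths (throwing vk::LogicError, or asserting when
+  // VULKAN_HPP_NO_EXCEPTIONS is defined). A hypothetical use, assuming caller-owned vectors
+  // `memories` (vk::DeviceMemory), `keys` (uint64_t) and `timeouts` (uint32_t) of equal size:
+  //
+  //   vk::Win32KeyedMutexAcquireReleaseInfoNV keyedMutexInfo( memories, keys, timeouts );
+  //   vk::SubmitInfo submitInfo;
+  //   submitInfo.setPNext( &keyedMutexInfo );   // chained into the queue submission via pNext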
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32SurfaceCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), hinstance( hinstance_ ), hwnd( hwnd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hinstance = hinstance_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hwnd = hwnd_;
+      return *this;
+    }
+
+
+    operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Win32SurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( hinstance == rhs.hinstance )
+          && ( hwnd == rhs.hwnd );
+    }
+
+    bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
+    HINSTANCE hinstance = {};
+    HWND hwnd = {};
+
+  };
+  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
+  {
+    using Type = Win32SurfaceCreateInfoKHR;
+  };
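+
+  // Illustrative sketch (not part of the generated header): the set*() members above return *this,
+  // so a create-info can be assembled in one expression and passed to the Instance members declared
+  // later in this file. Assuming a valid `instance`, `hinst` and `hwnd`, with exceptions and smart
+  // handles enabled:
+  //
+  //   auto surfaceInfo = vk::Win32SurfaceCreateInfoKHR()
+  //                        .setHinstance( hinst )
+  //                        .setHwnd( hwnd );
+  //   vk::UniqueSurfaceKHR surface = instance.createWin32SurfaceKHRUnique( surfaceInfo );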
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct WriteDescriptorSetAccelerationStructureKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
+    : accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ )
+    : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( WriteDescriptorSetAccelerationStructureKHR ) );
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = accelerationStructureCount_;
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAccelerationStructures = pAccelerationStructures_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+      pAccelerationStructures = accelerationStructures_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkWriteDescriptorSetAccelerationStructureKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const& ) const = default;
+#else
+    bool operator==( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructureCount == rhs.accelerationStructureCount )
+          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+    const void* pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures = {};
+
+  };
+  static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
+  {
+    using Type = WriteDescriptorSetAccelerationStructureKHR;
+  };
+  using WriteDescriptorSetAccelerationStructureNV = WriteDescriptorSetAccelerationStructureKHR;
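+
+  // Illustrative sketch (not part of the generated header): acceleration-structure descriptors are
+  // written by chaining this struct into a vk::WriteDescriptorSet through pNext, with descriptorCount
+  // matching accelerationStructureCount. Assuming `as` is a valid vk::AccelerationStructureKHR and
+  // `set`/`binding` identify the destination:
+  //
+  //   vk::WriteDescriptorSetAccelerationStructureKHR asInfo( as );   // ArrayProxy of a single element
+  //   vk::WriteDescriptorSet write;
+  //   write.setDstSet( set )
+  //        .setDstBinding( binding )
+  //        .setDescriptorCount( 1 )
+  //        .setDescriptorType( vk::DescriptorType::eAccelerationStructureKHR )
+  //        .setPNext( &asInfo );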
+
+  struct WriteDescriptorSetInlineUniformBlockEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT(uint32_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
+    : dataSize( dataSize_ ), pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlockEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
+    : dataSize( static_cast<uint32_t>( data_.size() * sizeof(T) ) ), pData( data_.data() )
+    {}
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    WriteDescriptorSetInlineUniformBlockEXT & operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) );
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlockEXT & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = static_cast<uint32_t>( data_.size() * sizeof(T) );
+      pData = data_.data();
+      return *this;
+    }
+#endif  // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+
+
+    operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+    }
+
+    operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const& ) const = default;
+#else
+    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+    const void* pNext = {};
+    uint32_t dataSize = {};
+    const void* pData = {};
+
+  };
+  static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlockEXT>
+  {
+    using Type = WriteDescriptorSetInlineUniformBlockEXT;
+  };
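+
+  // Illustrative sketch (not part of the generated header): the templated setData() above computes
+  // dataSize in bytes from the element type, so typed data can be bound as an inline uniform block
+  // without a manual sizeof calculation. Assuming a caller-owned std::array<float, 4> named
+  // `constants`:
+  //
+  //   vk::WriteDescriptorSetInlineUniformBlockEXT inlineBlock;
+  //   inlineBlock.setData<float>( constants );   // dataSize becomes 4 * sizeof(float)
+  //
+  // The struct is then chained into a vk::WriteDescriptorSet via pNext, like the
+  // acceleration-structure variant above.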
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  struct XcbSurfaceCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t* connection_ = {}, xcb_window_t window_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), connection( connection_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t* connection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      connection = connection_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+
+    operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( XcbSurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( connection == rhs.connection )
+          && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
+    }
+
+    bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
+    xcb_connection_t* connection = {};
+    xcb_window_t window = {};
+
+  };
+  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
+  {
+    using Type = XcbSurfaceCreateInfoKHR;
+  };
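+
+  // Note (not part of the generated header): `window` is an external platform typedef rather than a
+  // Vulkan handle, so operator== above falls back to memcmp instead of assuming an equality operator
+  // exists for it; XlibSurfaceCreateInfoKHR below treats its `Window` member the same way.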
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  struct XlibSurfaceCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display* dpy_ = {}, Window window_ = {}) VULKAN_HPP_NOEXCEPT
+    : flags( flags_ ), dpy( dpy_ ), window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
+    XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setDpy( Display* dpy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dpy = dpy_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+
+    operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( XlibSurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dpy == rhs.dpy )
+          && ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
+    }
+
+    bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
+    Display* dpy = {};
+    Window window = {};
+
+  };
+  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
+  {
+    using Type = XlibSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+  class DebugReportCallbackEXT
+  {
+  public:
+    using CType = VkDebugReportCallbackEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT( debugReportCallbackEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = debugReportCallbackEXT;
+      return *this;
+    }
+#endif
+
+    DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugReportCallbackEXT const& ) const = default;
+#else
+    bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugReportCallbackEXT m_debugReportCallbackEXT;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDebugReportCallbackEXT>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DebugUtilsMessengerEXT
+  {
+  public:
+    using CType = VkDebugUtilsMessengerEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+      return *this;
+    }
+#endif
+
+    DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsMessengerEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDebugUtilsMessengerEXT>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
+  };
+
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Instance;
+  template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
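+
+  // Illustrative sketch (not part of the generated header): the UniqueHandleTraits specializations
+  // above give each instance-level handle an ObjectDestroy<Instance, Dispatch> deleter, so the
+  // *Unique factories declared in the Instance class below return RAII wrappers. Assuming a valid
+  // `instance`, a filled-in `messengerInfo`, exceptions enabled, and a dispatcher that has loaded the
+  // VK_EXT_debug_utils entry points:
+  //
+  //   vk::UniqueDebugUtilsMessengerEXT messenger =
+  //       instance.createDebugUtilsMessengerEXTUnique( messengerInfo );
+  //   // the messenger is destroyed through the owning instance when it goes out of scope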
+
+  class Instance
+  {
+  public:
+    using CType = VkInstance;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
+      : m_instance( instance )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = instance;
+      return *this;
+    }
+#endif
+
+    Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Instance const& ) const = default;
+#else
+    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == rhs.m_instance;
+    }
+
+    bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != rhs.m_instance;
+    }
+
+    bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance < rhs.m_instance;
+    }
+#endif
+
+
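+    // Illustrative note (not part of the generated header): each factory below is declared in up to
+    // three flavours -- a raw overload taking pointers and returning vk::Result, an enhanced-mode
+    // overload taking references and returning the created handle (throwing, or asserting when
+    // exceptions are disabled), and a *Unique overload returning a UniqueHandle owned by this
+    // Instance. Which flavours are available depends on the VULKAN_HPP_DISABLE_ENHANCED_MODE and
+    // VULKAN_HPP_NO_SMART_HANDLE configuration macros.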
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 
-  template<typename Dispatch = DispatchLoaderStatic>
-  Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d = Dispatch() );
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = DispatchLoaderStatic> 
-  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = Dispatch() );
-  template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = DispatchLoaderStatic> 
-  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d );
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+
+#ifdef VK_USE_PLATFORM_GGP
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
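+    // Note (illustrative comment, not registry-generated content): each platform
+    // surface creator above follows the header's three-tier pattern: a pointer
+    // overload mirroring the C API, an enhanced overload returning
+    // ResultValueType<SurfaceKHR>, and a *Unique overload returning a smart handle.
+    // A minimal sketch, assuming VK_USE_PLATFORM_WIN32_KHR, a valid HINSTANCE/HWND
+    // pair, and exceptions enabled:
+    //
+    //   vk::Win32SurfaceCreateInfoKHR surfaceInfo( {}, hinstance, hwnd );
+    //   vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceInfo );
+    //   // ... present to the surface ...
+    //   instance.destroySurfaceKHR( surface );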
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator  = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template<typename Allocator  = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
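+    // Illustrative sketch (not part of the generated declarations): with the
+    // enhanced overloads, the count/pointer dance of vkEnumeratePhysicalDevices
+    // collapses into a single call that returns a std::vector, assuming exceptions
+    // are enabled so ResultValueType unwraps to the vector itself:
+    //
+    //   std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();
+    //   for ( vk::PhysicalDevice const & gpu : gpus ) { /* inspect properties, pick one */ }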
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkInstance m_instance;
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eInstance>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eInstance>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Instance;
+  };
+
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch> class UniqueHandleTraits<Instance, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+  using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator  = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+  template<typename Allocator  = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator  = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+  template<typename Allocator  = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const & d  VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
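+    // Sketch (illustrative only; assumes a Vulkan 1.1+ loader and exceptions
+    // enabled): the enhanced enumerateInstanceVersion overload returns the packed
+    // API version directly.
+    //
+    //   uint32_t apiVersion = vk::enumerateInstanceVersion();
+    //   // decode with VK_VERSION_MAJOR( apiVersion ) / VK_VERSION_MINOR( apiVersion )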
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkInstance *>( pInstance ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkInstance *>( &instance ) ) );
+    return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkInstance *>( &instance ) ) );
+    ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
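+  // Usage sketch for the createInstance* free functions defined above
+  // (illustrative comment, not generated content; assumes the default dispatcher
+  // and exceptions enabled):
+  //
+  //   vk::ApplicationInfo appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
+  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
+  //   vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );
+  //   // the ObjectDestroy<NoParent, Dispatch> deleter calls vkDestroyInstance
+  //   // when the UniqueInstance goes out of scope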
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d)
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d )
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d )
   {
     std::vector<ExtensionProperties,Allocator> properties;
     uint32_t propertyCount;
@@ -37679,12 +81247,15 @@
         result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
     return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
   }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d )
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d )
   {
     std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
     uint32_t propertyCount;
@@ -37698,6512 +81269,5421 @@
         result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
     return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
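+  // Illustrative usage of the enumeration implemented above (not part of the
+  // generated header): the enhanced overloads hide the two-call retry loop on
+  // vk::Result::eIncomplete and, as of this update, only resize on success.
+  //
+  //   std::vector<vk::ExtensionProperties> extensions =
+  //       vk::enumerateInstanceExtensionProperties();
+  //   std::vector<vk::ExtensionProperties> layerExtensions =
+  //       vk::enumerateInstanceExtensionProperties( std::string( "VK_LAYER_KHRONOS_validation" ) );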
 
-
-  // forward declarations
-  struct CmdProcessCommandsInfoNVX;
-
-  class CommandBuffer
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR CommandBuffer()
-      : m_commandBuffer(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t )
-      : m_commandBuffer(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer )
-      : m_commandBuffer( commandBuffer )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    CommandBuffer & operator=(VkCommandBuffer commandBuffer)
-    {
-      m_commandBuffer = commandBuffer;
-      return *this; 
-    }
-#endif
-
-    CommandBuffer & operator=( std::nullptr_t )
-    {
-      m_commandBuffer = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( CommandBuffer const & rhs ) const
-    {
-      return m_commandBuffer == rhs.m_commandBuffer;
-    }
-
-    bool operator!=(CommandBuffer const & rhs ) const
-    {
-      return m_commandBuffer != rhs.m_commandBuffer;
-    }
-
-    bool operator<(CommandBuffer const & rhs ) const
-    {
-      return m_commandBuffer < rhs.m_commandBuffer;
-    }
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result end(Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type end(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setLineWidth( float lineWidth, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setBlendConstants( const float blendConstants[4], Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = DispatchLoaderStatic>
-    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endConditionalRenderingEXT(Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = DispatchLoaderStatic>
-    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void nextSubpass( SubpassContents contents, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endRenderPass(Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void debugMarkerEndEXT(Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy<const WriteDescriptorSet> descriptorWrites, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setDeviceMask( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const ViewportWScalingNV> viewportWScalings, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const Rect2D> discardRectangles, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginRenderPass2KHR( const RenderPassBeginInfo* pRenderPassBegin, const SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void nextSubpass2KHR( const SubpassBeginInfoKHR* pSubpassBeginInfo, const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endRenderPass2KHR( const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndexedIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, const DeviceSize* pSizes, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets, ArrayProxy<const DeviceSize> sizes, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const Buffer> counterBuffers, ArrayProxy<const DeviceSize> counterBufferOffsets, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const Buffer> counterBuffers, ArrayProxy<const DeviceSize> counterBufferOffsets, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginQueryIndexedEXT( QueryPool queryPool, uint32_t query, QueryControlFlags flags, uint32_t index, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endQueryIndexedEXT( QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, Buffer counterBuffer, DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const Rect2D* pExclusiveScissors, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const Rect2D> exclusiveScissors, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void bindShadingRateImageNV( ImageView imageView, ImageLayout imageLayout, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawMeshTasksIndirectNV( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void drawMeshTasksIndirectCountNV( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void copyAccelerationStructureNV( AccelerationStructureNV dst, AccelerationStructureNV src, CopyAccelerationStructureModeNV mode, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const AccelerationStructureNV* pAccelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void writeAccelerationStructuresPropertiesNV( ArrayProxy<const AccelerationStructureNV> accelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void buildAccelerationStructureNV( const AccelerationStructureInfoNV* pInfo, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void traceRaysNV( Buffer raygenShaderBindingTableBuffer, DeviceSize raygenShaderBindingOffset, Buffer missShaderBindingTableBuffer, DeviceSize missShaderBindingOffset, DeviceSize missShaderBindingStride, Buffer hitShaderBindingTableBuffer, DeviceSize hitShaderBindingOffset, DeviceSize hitShaderBindingStride, Buffer callableShaderBindingTableBuffer, DeviceSize callableShaderBindingOffset, DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = Dispatch() ) const;
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const
-    {
-      return m_commandBuffer;
-    }
-
-    explicit operator bool() const
-    {
-      return m_commandBuffer != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_commandBuffer == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkCommandBuffer m_commandBuffer;
-  };
-
-  static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
-
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
+    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d )
   {
-    Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" );
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d )
+  {
+    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
+  {
+    uint32_t apiVersion;
+    Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
+    return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+#else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ), reinterpret_cast<const VkDeviceSize *>( pStrides ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+  }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+  }
+    if ( !strides.empty() && buffers.size() != strides.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit *>( pRegions ), static_cast<VkFilter>( filter ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageBlit *>( regions.data() ), static_cast<VkFilter>( filter ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( pBlitImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( &blitImageInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfo ), static_cast<VkBuffer>( indirectBuffer ), static_cast<VkDeviceSize>( indirectOffset ), indirectStride );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &info ), static_cast<VkBuffer>( indirectBuffer ), static_cast<VkDeviceSize>( indirectOffset ), indirectStride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR * const *>( ppOffsetInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const & pOffsetInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() );
+#else
+    if ( infos.size() != pOffsetInfos.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR * const *>( pOffsetInfos.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment *>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect *>( pRects ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), reinterpret_cast<const VkClearAttachment *>( attachments.data() ), rects.size(), reinterpret_cast<const VkClearRect *>( rects.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
+  }
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+#else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ), reinterpret_cast<const void *>( values.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageResolve *>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( pResolveImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( &resolveImageInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size(), reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR*>( combinerOps ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR*>( combinerOps ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR *>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( pCallableShaderBindingTable ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( &callableShaderBindingTable ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR *>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( pCallableShaderBindingTable ), width, height, depth );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR *>( &callableShaderBindingTable ), width, height, depth );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ), reinterpret_cast<const void *>( data.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWaitEvents( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+
+
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
   {
-    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+    Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d) const
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
   {
-    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
+    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
   {
-    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
+    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
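Note that the enhanced acquireNextImageKHR returns a ResultValue<uint32_t> rather than a plain value, because eTimeout, eNotReady and eSuboptimalKHR are listed as success codes and the caller still needs to inspect which one came back. A rough usage sketch; the swapchain, semaphore and timeout are placeholders and the default dispatcher is assumed:

  #include <cstdint>
  #include <vulkan/vulkan.hpp>

  uint32_t acquire( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
  {
    // rv carries both the vk::Result and the image index; the non-error
    // codes above are not converted into exceptions here.
    vk::ResultValue<uint32_t> rv =
      device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr );
    if ( rv.result == vk::Result::eSuboptimalKHR )
    {
      // swapchain is still usable, but recreating it is advisable
    }
    return rv.value;
  }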
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const
-  {
-    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const
-  {
-    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), reinterpret_cast< VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const
-  {
-    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const
-  {
-    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const
-  {
-    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const
-  {
-    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const
-  {
-    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const
-  {
-    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const
-  {
-    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const
-  {
-    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const
-  {
-    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const
-  {
-    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const
-  {
-    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const
-  {
-    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
   {
-    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+    Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast< VkPerformanceConfigurationINTEL *>( &configuration ) ) );
+    return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+    Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast< VkPerformanceConfigurationINTEL *>( &configuration ) ) );
+    ObjectRelease<Device, Dispatch> deleter( *this, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast< VkCommandBuffer *>( pCommandBuffers ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CommandBufferAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+  }
+
+  template <typename CommandBufferAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type >
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
+  {
+    std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename CommandBufferAllocator>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
+    std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+      PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+      for ( size_t i=0; i < allocateInfo.commandBufferCount; i++ )
+      {
+        uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+  }
+
+  template <typename Dispatch, typename CommandBufferAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type >
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
+    std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+      PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+      for ( size_t i=0; i < allocateInfo.commandBufferCount; i++ )
+      {
+        uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
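allocateCommandBuffersUnique wraps each returned handle in a UniqueHandle whose deleter is the PoolFree shown above, so the buffers are returned to their pool when the vector goes out of scope. A sketch under the default `vk` namespace with exceptions enabled:

  #include <cstdint>
  #include <vector>
  #include <vulkan/vulkan.hpp>

  std::vector<vk::UniqueCommandBuffer> makeCommandBuffers( vk::Device device, vk::CommandPool pool, uint32_t count )
  {
    vk::CommandBufferAllocateInfo allocateInfo( pool, vk::CommandBufferLevel::ePrimary, count );
    // Each element frees itself via vkFreeCommandBuffers when destroyed.
    return device.allocateCommandBuffersUnique( allocateInfo );
  }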
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast< VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DescriptorSetAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+  }
+
+  template <typename DescriptorSetAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type >
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename DescriptorSetAllocator>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
+    std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+      PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+      for ( size_t i=0; i < allocateInfo.descriptorSetCount; i++ )
+      {
+        uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+  }
+
+  template <typename Dispatch, typename DescriptorSetAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type >
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
+    std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+      PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+      for ( size_t i=0; i < allocateInfo.descriptorSetCount; i++ )
+      {
+        uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDeviceMemory *>( pMemory ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDeviceMemory *>( &memory ) ) );
+    return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDeviceMemory *>( &memory ) ) );
+    ObjectFree<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
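allocateMemory follows the same three-overload shape as the create* functions: an output parameter in the raw overload, a returned handle in enhanced mode, and a Unique variant that frees the allocation through ObjectFree. A rough sketch; the memory-type index would normally come from querying the physical device and is passed in here only for illustration:

  #include <cstdint>
  #include <vulkan/vulkan.hpp>

  vk::UniqueDeviceMemory allocate( vk::Device device, vk::DeviceSize size, uint32_t memoryTypeIndex )
  {
    vk::MemoryAllocateInfo allocateInfo( size, memoryTypeIndex );
    // Freed with vkFreeMemory when the UniqueDeviceMemory is destroyed.
    return device.allocateMemoryUnique( allocateInfo );
  }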
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const & bindInfos, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR *>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> const & bindInfos, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR *>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
   {
-    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+    Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
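In enhanced mode the void-returning bindBufferMemory overload throws on any failure, so typical call sites simply bind and move on. A minimal sketch tying a buffer to an allocation at offset 0:

  #include <vulkan/vulkan.hpp>

  void bind( vk::Device device, vk::Buffer buffer, vk::DeviceMemory memory )
  {
    // The offset must respect the alignment reported by getBufferMemoryRequirements.
    device.bindBufferMemory( buffer, memory, 0 );
  }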
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d) const
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
+    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBuildAccelerationStructureKHR( m_device, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR * const *>( ppOffsetInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const > const & pOffsetInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() );
 #else
-    if ( buffers.size() != offsets.size() )
+    if ( infos.size() != pOffsetInfos.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    Result result = static_cast<Result>( d.vkBuildAccelerationStructureKHR( m_device, infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildOffsetInfoKHR * const *>( pOffsetInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
+    return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkAccelerationStructureNV *>( &accelerationStructure ) ) );
+    return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkAccelerationStructureNV *>( &accelerationStructure ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkBuffer *>( pBuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkBuffer *>( &buffer ) ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkBuffer *>( &buffer ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
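createBuffer and createBufferUnique follow the standard layout shown above, with ObjectDestroy as the deleter for the Unique variant. A sketch creating a staging buffer, assuming exceptions and the default dispatcher:

  #include <vulkan/vulkan.hpp>

  vk::UniqueBuffer makeStagingBuffer( vk::Device device, vk::DeviceSize size )
  {
    vk::BufferCreateInfo createInfo( {}, size, vk::BufferUsageFlagBits::eTransferSrc, vk::SharingMode::eExclusive );
    // Destroyed via vkDestroyBuffer by the UniqueBuffer's deleter.
    return device.createBufferUnique( createInfo );
  }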
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkBufferView *>( pView ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkBufferView *>( &view ) ) );
+    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkBufferView *>( &view ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkCommandPool *>( pCommandPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkCommandPool *>( &commandPool ) ) );
+    return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkCommandPool *>( &commandPool ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
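A corresponding sketch for createCommandPoolUnique, pairing naturally with the allocateCommandBuffersUnique example earlier; the queue family index is a placeholder:

  #include <cstdint>
  #include <vulkan/vulkan.hpp>

  vk::UniqueCommandPool makePool( vk::Device device, uint32_t queueFamilyIndex )
  {
    vk::CommandPoolCreateInfo createInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
    return device.createCommandPoolUnique( createInfo );
  }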
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
     {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
     }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
   }
+
+  template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+    {
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const
-  {
-    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const
-  {
-    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+    return static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDeferredOperationKHR *>( pDeferredOperation ) ) );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const
-  {
-    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const
-  {
-    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const
-  {
-    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d) const
-  {
-    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d ) const
-  {
-    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d) const
-  {
-    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
-    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+    Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDeferredOperationKHR *>( &deferredOperation ) ) );
+    return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d) const
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
-    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+    Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDeferredOperationKHR *>( &deferredOperation ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
   }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorPool *>( pDescriptorPool ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
-    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorPool *>( &descriptorPool ) ) );
+    return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
   }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorPool *>( &descriptorPool ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
+    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorSetLayout *>( pSetLayout ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
-    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorSetLayout *>( &setLayout ) ) );
+    return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
   }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorSetLayout *>( &setLayout ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
-    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
+    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
   }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
-    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
+    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkEvent *>( pEvent ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkEvent *>( &event ) ) );
+    return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkEvent *>( &event ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Event, Dispatch>( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFence *>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFence *>( &fence ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFramebuffer *>( pFramebuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFramebuffer *>( &framebuffer ) ) );
+    return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFramebuffer *>( &framebuffer ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+    {
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+    {
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkImage *>( pImage ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Image image;
+    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkImage *>( &image ) ) );
+    return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Image image;
+    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkImage *>( &image ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Image, Dispatch>( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkImageView *>( pView ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkImageView *>( &view ) ) );
+    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkImageView *>( &view ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
+    return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipelineCache *>( pPipelineCache ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkPipelineCache *>( &pipelineCache ) ) );
+    return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkPipelineCache *>( &pipelineCache ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipelineLayout *>( pPipelineLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkPipelineLayout *>( &pipelineLayout ) ) );
+    return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkPipelineLayout *>( &pipelineLayout ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPrivateDataSlotEXT *>( pPrivateDataSlot ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+    Result result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
+    return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+    Result result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkQueryPool *>( pQueryPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkQueryPool *>( &queryPool ) ) );
+    return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkQueryPool *>( &queryPool ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+    {
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+    {
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
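+
+  // Editorial usage sketch, not part of the generated header: createRayTracingPipelinesKHR
+  // returns ResultValue<...> rather than ResultValueType<...>::type because
+  // eOperationDeferredKHR, eOperationNotDeferredKHR and ePipelineCompileRequiredEXT are treated
+  // as additional success codes, so the result must be inspected even with exceptions enabled.
+  // `device` and `rtInfos` (a std::vector<vk::RayTracingPipelineCreateInfoKHR>) are hypothetical:
+  //
+  //   #ifdef VK_ENABLE_BETA_EXTENSIONS
+  //   auto rv = device.createRayTracingPipelinesKHR( vk::PipelineCache(), rtInfos );
+  //   if ( rv.result == vk::Result::eSuccess ) { /* use rv.value */ }
+  //   #endif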
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+    {
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    std::vector<Pipeline> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+    {
+      uniquePipelines.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkRenderPass *>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkRenderPass *>( &renderPass ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkRenderPass *>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkRenderPass *>( &renderPass ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkRenderPass *>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkRenderPass *>( &renderPass ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSampler *>( pSampler ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSampler *>( &sampler ) ) );
+    return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSampler *>( &sampler ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSemaphore *>( pSemaphore ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSemaphore *>( &semaphore ) ) );
+    return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSemaphore *>( &semaphore ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkShaderModule *>( pShaderModule ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkShaderModule *>( &shaderModule ) ) );
+    return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkShaderModule *>( &shaderModule ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSwapchainKHR *>( pSwapchains ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SwapchainKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+  }
+
+  template <typename SwapchainKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type >
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
+  {
+    std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename SwapchainKHRAllocator>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
+    std::vector<SwapchainKHR> swapchains( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueSwapchains.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+
+  template <typename Dispatch, typename SwapchainKHRAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type >
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
+    std::vector<SwapchainKHR> swapchains( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueSwapchains.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0; i < createInfos.size(); i++ )
+      {
+        uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
+      }
+    }
+    return createResultValue( result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SwapchainKHR, Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
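+
+  // Editorial usage sketch, not part of the generated header: createSharedSwapchainsKHR takes an
+  // ArrayProxy of create infos and fills one SwapchainKHR per entry, while the singular
+  // createSharedSwapchainKHR overload wraps the count-of-one case. `device` and `sharedInfos`
+  // (e.g. a std::vector<vk::SwapchainCreateInfoKHR>) are hypothetical placeholders:
+  //
+  //   std::vector<vk::SwapchainKHR> chains  = device.createSharedSwapchainsKHR( sharedInfos );
+  //   auto                          uchains = device.createSharedSwapchainsKHRUnique( sharedInfos );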
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSwapchainKHR *>( pSwapchain ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSwapchainKHR *>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSwapchainKHR *>( &swapchain ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
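+
+  // Editorial usage sketch, not part of the generated header: a typical swapchain setup keeps the
+  // handle in a UniqueHandle so vkDestroySwapchainKHR runs automatically. `device` and
+  // `swapchainInfo` are hypothetical placeholders for a valid vk::Device and a filled-in
+  // vk::SwapchainCreateInfoKHR (surface, image format, extent, present mode, ...):
+  //
+  //   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( swapchainInfo );
+  //   // With VULKAN_HPP_NO_EXCEPTIONS defined, the enhanced overloads instead return a
+  //   // ResultValue pair and the embedded vk::Result must be checked by the caller.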
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkValidationCacheEXT *>( pValidationCache ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkValidationCacheEXT *>( &validationCache ) ) );
+    return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkValidationCacheEXT *>( &validationCache ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
+    return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
   }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+#else
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+#else
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), dataSize, pData ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV(  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy<T> const &data, Dispatch const &d  ) const
   {
-    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d) const
-  {
-    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d ) const
-  {
-    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const
+  template <typename T, typename Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const & d ) const
   {
-    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+    std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d ) const
-  {
-    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const
-  {
-    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges, Dispatch const &d ) const
-  {
-    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d) const
-  {
-    d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects, Dispatch const &d ) const
-  {
-    d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d) const
-  {
-    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions, Dispatch const &d ) const
-  {
-    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const
-  {
-    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const
-  {
-    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const
-  {
-    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const
-  {
-    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const
-  {
-    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const
-  {
-    d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const
-  {
-    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const
-  {
-    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d) const
-  {
-    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d ) const
-  {
-    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d) const
-  {
-    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d ) const
-  {
-    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const
-  {
-    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( pConditionalRenderingBegin ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const
-  {
-    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( &conditionalRenderingBegin ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const
-  {
-    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const
-  {
-    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const
-  {
-    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const
-  {
-    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d) const
-  {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d ) const
-  {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const
-  {
-    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const
-  {
-    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const
-  {
-    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const & d ) const
   {
-    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
+    T data;
+    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), sizeof( T ), reinterpret_cast<void*>( &data ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast< VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast< VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast< VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast< VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast< VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast< VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast< VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast< VkMemoryRequirements *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+    return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d ) const
-  {
-    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d) const
-  {
-    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d ) const
-  {
-    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const
-  {
-    d.vkCmdEndRenderPass( m_commandBuffer );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const
-  {
-    d.vkCmdEndRenderPass( m_commandBuffer );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const
-  {
-    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const
-  {
-    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const
-  {
-    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const
-  {
-    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const
-  {
-    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const
-  {
-    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const
-  {
-    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const
-  {
-    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const
-  {
-    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const
-  {
-    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const
-  {
-    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const
-  {
-    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const
-  {
-    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy<const WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const
-  {
-    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const
-  {
-    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const
-  {
-    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const
-  {
-    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const
-  {
-    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const
-  {
-    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const
-  {
-    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const
-  {
-    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const
-  {
-    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const
-  {
-    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const
-  {
-    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const
-  {
-    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const
-  {
-    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d) const
-  {
-    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const Rect2D> discardRectangles, Dispatch const &d ) const
-  {
-    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const
-  {
-    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( pSampleLocationsInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const
-  {
-    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( &sampleLocationsInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
-  {
-    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
-  {
-    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const
-  {
-    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const
-  {
-    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
-  {
-    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
-  {
-    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const
-  {
-    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), dstOffset, marker );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const
-  {
-    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), dstOffset, marker );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo* pRenderPassBegin, const SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d) const
-  {
-    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d ) const
-  {
-    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR* pSubpassBeginInfo, const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const
-  {
-    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const
-  {
-    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const
-  {
-    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const
-  {
-    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const
-  {
-    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const
-  {
-    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, const DeviceSize* pSizes, Dispatch const &d) const
-  {
-    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets, pSizes );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets, ArrayProxy<const DeviceSize> sizes, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const &timestampInfos, ArrayProxy<uint64_t> const &timestamps, Dispatch const &d ) const
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
 #else
-    if ( buffers.size() != offsets.size() )
+    if ( timestampInfos.size() != timestamps.size() )
     {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
     }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( buffers.size() == sizes.size() );
-#else
-    if ( buffers.size() != sizes.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( offsets.size() == sizes.size() );
-#else
-    if ( offsets.size() != sizes.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data(), sizes.data() );
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    uint64_t maxDeviation;
+    Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
+    return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d) const
-  {
-    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), pCounterBufferOffsets );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const Buffer> counterBuffers, ArrayProxy<const DeviceSize> counterBufferOffsets, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
-#else
-    if ( counterBuffers.size() != counterBufferOffsets.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), counterBufferOffsets.data() );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d) const
-  {
-    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), pCounterBufferOffsets );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const Buffer> counterBuffers, ArrayProxy<const DeviceSize> counterBufferOffsets, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
-#else
-    if ( counterBuffers.size() != counterBufferOffsets.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), counterBufferOffsets.data() );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
+#ifdef VK_ENABLE_BETA_EXTENSIONS
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( QueryPool queryPool, uint32_t query, QueryControlFlags flags, uint32_t index, Dispatch const &d) const
+  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
   }
 #else
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( QueryPool queryPool, uint32_t query, QueryControlFlags flags, uint32_t index, Dispatch const &d ) const
+  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+    return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
   {
-    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+    Result result = static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( pSupport ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( &support ) );
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( &support ) );
+    return structureChain;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, Buffer counterBuffer, DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), counterBufferOffset, counterOffset, vertexStride );
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( pSupport ) );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, Buffer counterBuffer, DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), counterBufferOffset, counterOffset, vertexStride );
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( &support ) );
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( &support ) );
+    return structureChain;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const Rect2D* pExclusiveScissors, Dispatch const &d) const
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D*>( pExclusiveScissors ) );
+    return static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR *>( version ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const Rect2D> exclusiveScissors, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const & d ) const
   {
-    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast<const VkRect2D*>( exclusiveScissors.data() ) );
+    Result result = static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR *>( &version ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureCompatibilityKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast< VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast< VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+    return peerMemoryFeatures;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( ImageView imageView, ImageLayout imageLayout, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast< VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( ImageView imageView, ImageLayout imageLayout, Dispatch const &d ) const
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast< VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+    return peerMemoryFeatures;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV*>( pShadingRatePalettes ) );
+    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast< VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
   {
-    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast<const VkShadingRatePaletteNV*>( shadingRatePalettes.data() ) );
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast< VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
+    return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const
-  {
-    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( pCustomSampleOrders ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const
-  {
-    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size() , reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( customSampleOrders.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const
-  {
-    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const
-  {
-    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const
-  {
-    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const
-  {
-    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( AccelerationStructureNV dst, AccelerationStructureNV src, CopyAccelerationStructureModeNV mode, Dispatch const &d) const
-  {
-    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( AccelerationStructureNV dst, AccelerationStructureNV src, CopyAccelerationStructureModeNV mode, Dispatch const &d ) const
-  {
-    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const AccelerationStructureNV* pAccelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const
-  {
-    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const AccelerationStructureNV> accelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const
-  {
-    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureNV*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV* pInfo, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d) const
-  {
-    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( pInfo ), static_cast<VkBuffer>( instanceData ), instanceOffset, update, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), scratchOffset );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d ) const
-  {
-    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( &info ), static_cast<VkBuffer>( instanceData ), instanceOffset, update, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), scratchOffset );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( Buffer raygenShaderBindingTableBuffer, DeviceSize raygenShaderBindingOffset, Buffer missShaderBindingTableBuffer, DeviceSize missShaderBindingOffset, DeviceSize missShaderBindingStride, Buffer hitShaderBindingTableBuffer, DeviceSize hitShaderBindingOffset, DeviceSize hitShaderBindingStride, Buffer callableShaderBindingTableBuffer, DeviceSize callableShaderBindingOffset, DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const
-  {
-    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), raygenShaderBindingOffset, static_cast<VkBuffer>( missShaderBindingTableBuffer ), missShaderBindingOffset, missShaderBindingStride, static_cast<VkBuffer>( hitShaderBindingTableBuffer ), hitShaderBindingOffset, hitShaderBindingStride, static_cast<VkBuffer>( callableShaderBindingTableBuffer ), callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( Buffer raygenShaderBindingTableBuffer, DeviceSize raygenShaderBindingOffset, Buffer missShaderBindingTableBuffer, DeviceSize missShaderBindingOffset, DeviceSize missShaderBindingStride, Buffer hitShaderBindingTableBuffer, DeviceSize hitShaderBindingOffset, DeviceSize hitShaderBindingStride, Buffer callableShaderBindingTableBuffer, DeviceSize callableShaderBindingOffset, DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const
-  {
-    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), raygenShaderBindingOffset, static_cast<VkBuffer>( missShaderBindingTableBuffer ), missShaderBindingOffset, missShaderBindingStride, static_cast<VkBuffer>( hitShaderBindingTableBuffer ), hitShaderBindingOffset, hitShaderBindingStride, static_cast<VkBuffer>( callableShaderBindingTableBuffer ), callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  struct SubmitInfo
-  {
-    SubmitInfo( uint32_t waitSemaphoreCount_ = 0,
-                const Semaphore* pWaitSemaphores_ = nullptr,
-                const PipelineStageFlags* pWaitDstStageMask_ = nullptr,
-                uint32_t commandBufferCount_ = 0,
-                const CommandBuffer* pCommandBuffers_ = nullptr,
-                uint32_t signalSemaphoreCount_ = 0,
-                const Semaphore* pSignalSemaphores_ = nullptr )
-      : waitSemaphoreCount( waitSemaphoreCount_ )
-      , pWaitSemaphores( pWaitSemaphores_ )
-      , pWaitDstStageMask( pWaitDstStageMask_ )
-      , commandBufferCount( commandBufferCount_ )
-      , pCommandBuffers( pCommandBuffers_ )
-      , signalSemaphoreCount( signalSemaphoreCount_ )
-      , pSignalSemaphores( pSignalSemaphores_ )
-    {
-    }
-
-    SubmitInfo( VkSubmitInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubmitInfo ) );
-    }
-
-    SubmitInfo& operator=( VkSubmitInfo const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( SubmitInfo ) );
-      return *this;
-    }
-    SubmitInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
-    {
-      waitSemaphoreCount = waitSemaphoreCount_;
-      return *this;
-    }
-
-    SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
-    {
-      pWaitSemaphores = pWaitSemaphores_;
-      return *this;
-    }
-
-    SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
-    {
-      pWaitDstStageMask = pWaitDstStageMask_;
-      return *this;
-    }
-
-    SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
-    {
-      commandBufferCount = commandBufferCount_;
-      return *this;
-    }
-
-    SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
-    {
-      pCommandBuffers = pCommandBuffers_;
-      return *this;
-    }
-
-    SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
-    {
-      signalSemaphoreCount = signalSemaphoreCount_;
-      return *this;
-    }
-
-    SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
-    {
-      pSignalSemaphores = pSignalSemaphores_;
-      return *this;
-    }
-
-    operator VkSubmitInfo const&() const
-    {
-      return *reinterpret_cast<const VkSubmitInfo*>(this);
-    }
-
-    operator VkSubmitInfo &()
-    {
-      return *reinterpret_cast<VkSubmitInfo*>(this);
-    }
-
-    bool operator==( SubmitInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
-          && ( pWaitSemaphores == rhs.pWaitSemaphores )
-          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
-          && ( commandBufferCount == rhs.commandBufferCount )
-          && ( pCommandBuffers == rhs.pCommandBuffers )
-          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
-          && ( pSignalSemaphores == rhs.pSignalSemaphores );
-    }
-
-    bool operator!=( SubmitInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eSubmitInfo;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t waitSemaphoreCount;
-    const Semaphore* pWaitSemaphores;
-    const PipelineStageFlags* pWaitDstStageMask;
-    uint32_t commandBufferCount;
-    const CommandBuffer* pCommandBuffers;
-    uint32_t signalSemaphoreCount;
-    const Semaphore* pSignalSemaphores;
-  };
-  static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
-
-  class Queue
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR Queue()
-      : m_queue(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t )
-      : m_queue(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue )
-      : m_queue( queue )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Queue & operator=(VkQueue queue)
-    {
-      m_queue = queue;
-      return *this; 
-    }
-#endif
-
-    Queue & operator=( std::nullptr_t )
-    {
-      m_queue = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Queue const & rhs ) const
-    {
-      return m_queue == rhs.m_queue;
-    }
-
-    bool operator!=(Queue const & rhs ) const
-    {
-      return m_queue != rhs.m_queue;
-    }
-
-    bool operator<(Queue const & rhs ) const
-    {
-      return m_queue < rhs.m_queue;
-    }
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result waitIdle(Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type waitIdle(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getCheckpointDataNV( uint32_t* pCheckpointDataCount, CheckpointDataNV* pCheckpointData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const
-    {
-      return m_queue;
-    }
-
-    explicit operator bool() const
-    {
-      return m_queue != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_queue == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkQueue m_queue;
-  };
-
-  static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::submit( ArrayProxy<const SubmitInfo> submits, Fence fence, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
-  {
-    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
-  {
-    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const
-  {
-    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const
-  {
-    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
-  {
-    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
-  {
-    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, CheckpointDataNV* pCheckpointData, Dispatch const &d) const
-  {
-    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( pCheckpointData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Dispatch const &d ) const
-  {
-    std::vector<CheckpointDataNV,Allocator> checkpointData;
-    uint32_t checkpointDataCount;
-    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
-    checkpointData.resize( checkpointDataCount );
-    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
-    return checkpointData;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<CheckpointDataNV,Allocator> checkpointData( vectorAllocator );
-    uint32_t checkpointDataCount;
-    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
-    checkpointData.resize( checkpointDataCount );
-    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
-    return checkpointData;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  class Device;
-
-  template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureNV,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Buffer,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueBuffer = UniqueHandle<Buffer,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<BufferView,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueBufferView = UniqueHandle<BufferView,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<CommandBuffer,Dispatch> {public: using deleter = PoolFree<Device, CommandPool,Dispatch>; };
-  using UniqueCommandBuffer = UniqueHandle<CommandBuffer,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<CommandPool,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueCommandPool = UniqueHandle<CommandPool,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<DescriptorPool,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueDescriptorPool = UniqueHandle<DescriptorPool,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<DescriptorSet,Dispatch> {public: using deleter = PoolFree<Device, DescriptorPool,Dispatch>; };
-  using UniqueDescriptorSet = UniqueHandle<DescriptorSet,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<DeviceMemory,Dispatch> {public: using deleter = ObjectFree<Device,Dispatch>; };
-  using UniqueDeviceMemory = UniqueHandle<DeviceMemory,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Event,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueEvent = UniqueHandle<Event,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Fence,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueFence = UniqueHandle<Fence,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Framebuffer,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueFramebuffer = UniqueHandle<Framebuffer,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Image,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueImage = UniqueHandle<Image,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<ImageView,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueImageView = UniqueHandle<ImageView,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<IndirectCommandsLayoutNVX,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<ObjectTableNVX,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Pipeline,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniquePipeline = UniqueHandle<Pipeline,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<PipelineCache,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniquePipelineCache = UniqueHandle<PipelineCache,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<PipelineLayout,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniquePipelineLayout = UniqueHandle<PipelineLayout,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<QueryPool,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueQueryPool = UniqueHandle<QueryPool,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<RenderPass,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueRenderPass = UniqueHandle<RenderPass,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Sampler,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueSampler = UniqueHandle<Sampler,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<Semaphore,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueSemaphore = UniqueHandle<Semaphore,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<ShaderModule,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueShaderModule = UniqueHandle<ShaderModule,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<SwapchainKHR,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<ValidationCacheEXT,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
-  using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT,DispatchLoaderStatic>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
-  class Device
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR Device()
-      : m_device(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Device( std::nullptr_t )
-      : m_device(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device )
-      : m_device( device )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Device & operator=(VkDevice device)
-    {
-      m_device = device;
-      return *this; 
-    }
-#endif
-
-    Device & operator=( std::nullptr_t )
-    {
-      m_device = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Device const & rhs ) const
-    {
-      return m_device == rhs.m_device;
-    }
-
-    bool operator!=(Device const & rhs ) const
-    {
-      return m_device != rhs.m_device;
-    }
-
-    bool operator<(Device const & rhs ) const
-    {
-      return m_device < rhs.m_device;
-    }
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result waitIdle(Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type waitIdle(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void free( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void*>::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void unmapMemory( DeviceMemory memory, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    DeviceSize getMemoryCommitment( DeviceMemory memory, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MemoryRequirements getBufferMemoryRequirements( Buffer buffer, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MemoryRequirements getImageMemoryRequirements( Image image, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( Image image, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type resetFences( ArrayProxy<const Fence> fences, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getFenceStatus( Fence fence, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Event,Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyEvent( Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getEventStatus( Event event, Dispatch const &d = Dispatch() ) const;
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result setEvent( Event event, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type setEvent( Event event, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result resetEvent( Event event, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type resetEvent( Event event, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = DispatchLoaderStatic>
-    Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Image,Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyImage( Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Pipeline>::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Pipeline>::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type free( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Extent2D getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void free( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<Image>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<Image>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValue<uint32_t> acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_NV
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<HANDLE>::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const;
 
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast< VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast< VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
+    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+  }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+  }
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = Dispatch() ) const;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
+    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+  }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast< VkDeviceSize *>( pCommittedMemoryInBytes ) );
+  }
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast< VkDeviceSize *>( &committedMemoryInBytes ) );
+    return committedMemoryInBytes;
+  }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+  }
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+  }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+  }
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = Dispatch() ) const;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+  }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Fence>::type registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<uint64_t>::type getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindImageMemory2( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> swapchains, ArrayProxy<const HdrMetadataEXT> metadata, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy<const ValidationCacheEXT> srcCaches, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getCalibratedTimestampsEXT( uint32_t timestampCount, const CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createRenderPass2KHR( const RenderPassCreateInfo2KHR* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createAccelerationStructureNV( const AccelerationStructureCreateInfoNV* pCreateInfo, const AllocationCallbacks* pAllocator, AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV* pInfo, MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createRayTracingPipelinesNV( PipelineCache pipelineCache, uint32_t createInfoCount, const RayTracingPipelineCreateInfoNV* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<Pipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Pipeline>::type createRayTracingPipelineNV( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createRayTracingPipelineNVUnique( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getImageDrmFormatModifierPropertiesEXT( Image image, ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( Image image, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const
-    {
-      return m_device;
-    }
-
-    explicit operator bool() const
-    {
-      return m_device != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_device == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDevice m_device;
-  };
-
-  static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetDeviceProcAddr( m_device, pName );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetDeviceProcAddr( m_device, name.c_str() );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d) const
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
-    Queue queue;
+    VULKAN_HPP_NAMESPACE::Queue queue;
     d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
     return queue;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const
+  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( pQueueInfo ), reinterpret_cast<VkQueue*>( pQueue ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
-    DeviceMemory memory;
-    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
-    return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DeviceMemory memory;
-    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
-
-    ObjectFree<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<DeviceMemory,Dispatch>( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
+    return queue;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), ppData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void*>::type Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, Dispatch const &d ) const
-  {
-    void* pData;
-    Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), &pData ) );
-    return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d) const
-  {
-    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d ) const
-  {
-    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const
-  {
-    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), pCommittedMemoryInBytes );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE DeviceSize Device::getMemoryCommitment( DeviceMemory memory, Dispatch const &d ) const
-  {
-    DeviceSize committedMemoryInBytes;
-    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), &committedMemoryInBytes );
-    return committedMemoryInBytes;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const
-  {
-    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MemoryRequirements Device::getBufferMemoryRequirements( Buffer buffer, Dispatch const &d ) const
-  {
-    MemoryRequirements memoryRequirements;
-    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const
-  {
-    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MemoryRequirements Device::getImageMemoryRequirements( Image image, Dispatch const &d ) const
-  {
-    MemoryRequirements memoryRequirements;
-    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const
-  {
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( Image image, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( Image image, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements( vectorAllocator );
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Fence fence;
-    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
-    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Fence fence;
-    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Fence,Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetFences( ArrayProxy<const Fence> fences, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), waitAll, timeout ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), waitAll, timeout ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Semaphore semaphore;
-    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
-    return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Semaphore semaphore;
-    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Semaphore,Dispatch>( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Event event;
-    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
-    return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Event,Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Event event;
-    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Event,Dispatch>( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyEvent( Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::setEvent( Event event, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::setEvent( Event event, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" );
+    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetEvent( Event event, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetEvent( Event event, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d) const
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
+    return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
   {
-    QueryPool queryPool;
-    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
-    return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" );
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    QueryPool queryPool;
-    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<QueryPool,Dispatch>( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast< VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
   {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
+    Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast< VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
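A minimal usage sketch for the enhanced-mode overload added above; the handle names are illustrative, and the call only makes sense when VK_EXT_image_drm_format_modifier is enabled and the image was created with DRM-format-modifier tiling:

    #include <vulkan/vulkan.hpp>

    // Read back which DRM format modifier the implementation chose for an image
    // created with vk::ImageTiling::eDrmFormatModifierEXT (extension must be enabled).
    // In the default exceptions configuration the call throws on failure.
    uint64_t queryDrmModifier( vk::Device device, vk::Image image )
    {
      vk::ImageDrmFormatModifierPropertiesEXT props = device.getImageDrmFormatModifierPropertiesEXT( image );
      return props.drmFormatModifier;
    }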
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast<VkQueryResultFlags>( flags ) ) );
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast< VkMemoryRequirements *>( pMemoryRequirements ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride, static_cast<VkQueryResultFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast< VkMemoryRequirements *>( &memoryRequirements ) );
+    return memoryRequirements;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
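A minimal sketch of the enhanced-mode overload above, assuming an already created vk::Device `device` and vk::Image `image` (both hypothetical) and the default dispatcher:

    #include <vulkan/vulkan.hpp>

    // Query how much memory an image needs; the enhanced-mode overload
    // returns the vk::MemoryRequirements struct by value.
    vk::MemoryRequirements queryImageRequirements( vk::Device device, vk::Image image )
    {
      vk::MemoryRequirements reqs = device.getImageMemoryRequirements( image );
      // reqs.size, reqs.alignment and reqs.memoryTypeBits drive the allocation choice.
      return reqs;
    }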
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Buffer buffer;
-    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
-    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Buffer buffer;
-    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
 
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Buffer,Dispatch>( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter );
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
   }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
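A sketch of the StructureChain variant defined above, again with hypothetical `device`/`image` handles; vk::MemoryDedicatedRequirements is a core Vulkan 1.1 struct that chains onto vk::MemoryRequirements2:

    #include <vulkan/vulkan.hpp>

    // Request dedicated-allocation info in the same query by asking for a
    // structure chain of MemoryRequirements2 + MemoryDedicatedRequirements.
    bool prefersDedicatedAllocation( vk::Device device, vk::Image image )
    {
      auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
        vk::ImageMemoryRequirementsInfo2( image ) );
      return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation != 0;
    }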
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast< VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    BufferView view;
-    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
-    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    BufferView view;
-    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
 
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<BufferView,Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter );
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast< VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
   }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
   {
-    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d ) const
+  {
+    std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
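A short sketch of the vector-returning overload above (hypothetical handles, default allocator): the enhanced-mode path performs the count/fill sequence internally and hands back a std::vector directly.

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Enumerate the sparse memory requirements of a sparse-resident image.
    void inspectSparseRequirements( vk::Device device, vk::Image image )
    {
      std::vector<vk::SparseImageMemoryRequirements> reqs = device.getImageSparseMemoryRequirements( image );
      for ( vk::SparseImageMemoryRequirements const & r : reqs )
      {
        // r.formatProperties.imageGranularity is the sparse block size for this aspect;
        // r.imageMipTailSize tells how much memory the mip tail needs.
        (void)r;
      }
    }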
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast< VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
   {
-    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
+  {
+    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast< VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Image image;
-    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
-    return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Image,Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Image image;
-    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
 
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Image,Dispatch>( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImage( Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
   {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
+  {
+    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( pSubresource ), reinterpret_cast< VkSubresourceLayout *>( pLayout ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE SubresourceLayout Device::getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    SubresourceLayout layout;
-    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
+    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast< VkSubresourceLayout *>( &layout ) );
     return layout;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
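A minimal sketch for the overload above, assuming a linearly tiled image created elsewhere (the row pitch is what a CPU-side copy loop needs):

    #include <vulkan/vulkan.hpp>

    // Query the layout of mip 0 / layer 0 of a linearly tiled color image.
    vk::DeviceSize rowPitchOfMipZero( vk::Device device, vk::Image linearImage )
    {
      vk::SubresourceLayout layout = device.getImageSubresourceLayout(
        linearImage, vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );
      return layout.rowPitch;
    }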
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
+    return static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast< VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
   {
-    ImageView view;
-    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
-    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" );
+    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
+    Result result = static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast< VkImageViewAddressPropertiesNVX *>( &properties ) ) );
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    ImageView view;
-    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<ImageView,Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
   {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    struct AHardwareBuffer* buffer;
+    Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
+    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
   {
-    ShaderModule shaderModule;
-    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
-    return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" );
+    int fd;
+    Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    ShaderModule shaderModule;
-    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<ShaderModule,Dispatch>( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
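A hedged sketch of the overload above, assuming VK_KHR_external_memory_fd is enabled and `memory` is a hypothetical allocation created as exportable:

    #include <vulkan/vulkan.hpp>

    // Export an opaque POSIX file descriptor for a device memory allocation.
    // With the default exceptions configuration the enhanced overload returns the fd
    // directly and throws on failure; the caller owns the fd and must close() it.
    int exportMemoryFd( vk::Device device, vk::DeviceMemory memory )
    {
      return device.getMemoryFdKHR(
        vk::MemoryGetFdInfoKHR( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );
    }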
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast< VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
   {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast< VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
+    return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast< VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const & d ) const
   {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast< VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
+    return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d) const
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
+    return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
   {
-    PipelineCache pipelineCache;
-    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
-    return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    PipelineCache pipelineCache;
-    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<PipelineCache,Dispatch>( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d ) const
-  {
-    std::vector<uint8_t,Allocator> data;
-    size_t dataSize;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
-      if ( ( result == Result::eSuccess ) && dataSize )
-      {
-        data.resize( dataSize );
-        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( dataSize <= data.size() );
-    data.resize( dataSize );
-    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<uint8_t,Allocator> data( vectorAllocator );
-    size_t dataSize;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
-      if ( ( result == Result::eSuccess ) && dataSize )
-      {
-        data.resize( dataSize );
-        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( dataSize <= data.size() );
-    data.resize( dataSize );
-    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
-    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
-    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
-  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Pipeline pipeline;
-    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
-    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
-    std::vector<UniquePipeline, Allocator> pipelines;
-    pipelines.reserve( createInfos.size() );
-    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
-    Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
-    std::vector<UniquePipeline, Allocator> pipelines;
-    pipelines.reserve( createInfos.size() );
-    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
-    Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
-  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Pipeline pipeline;
-    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
-    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
-    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
-  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Pipeline pipeline;
-    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
-    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
-    std::vector<UniquePipeline, Allocator> pipelines;
-    pipelines.reserve( createInfos.size() );
-    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
-    Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
-    std::vector<UniquePipeline, Allocator> pipelines;
-    pipelines.reserve( createInfos.size() );
-    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
-    Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
-  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Pipeline pipeline;
-    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    PipelineLayout pipelineLayout;
-    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
-    return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    PipelineLayout pipelineLayout;
-    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<PipelineLayout,Dispatch>( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Sampler sampler;
-    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
-    return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Sampler sampler;
-    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Sampler,Dispatch>( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorSetLayout setLayout;
-    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
-    return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorSetLayout setLayout;
-    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<DescriptorSetLayout,Dispatch>( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorPool descriptorPool;
-    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
-    return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorPool descriptorPool;
-    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<DescriptorPool,Dispatch>( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
-  {
-    std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
-    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator );
-    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
-  {
-    static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueDescriptorSet ), "DescriptorSet is greater than UniqueDescriptorSet!" );
-    std::vector<UniqueDescriptorSet, Allocator> descriptorSets;
-    descriptorSets.reserve( allocateInfo.descriptorSetCount );
-    DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueDescriptorSet ) - sizeof( DescriptorSet ) ) );
-    Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
-
-    PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
-    for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
-    {
-      descriptorSets.push_back( UniqueDescriptorSet( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueDescriptorSet ), "DescriptorSet is greater than UniqueDescriptorSet!" );
-    std::vector<UniqueDescriptorSet, Allocator> descriptorSets;
-    descriptorSets.reserve( allocateInfo.descriptorSetCount );
-    DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueDescriptorSet ) - sizeof( DescriptorSet ) ) );
-    Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
-
-    PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
-    for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
-    {
-      descriptorSets.push_back( UniqueDescriptorSet( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::free( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const
-  {
-    d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const
-  {
-    d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Framebuffer framebuffer;
-    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
-    return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Framebuffer framebuffer;
-    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Framebuffer,Dispatch>( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    RenderPass renderPass;
-    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
-    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    RenderPass renderPass;
-    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d) const
-  {
-    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Extent2D Device::getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d ) const
-  {
-    Extent2D granularity;
-    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
-    return granularity;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    CommandPool commandPool;
-    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
-    return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    CommandPool commandPool;
-    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<CommandPool,Dispatch>( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
-  {
-    std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
-    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
-    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount, vectorAllocator );
-    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
-    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
-  {
-    static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueCommandBuffer ), "CommandBuffer is greater than UniqueCommandBuffer!" );
-    std::vector<UniqueCommandBuffer, Allocator> commandBuffers;
-    commandBuffers.reserve( allocateInfo.commandBufferCount );
-    CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueCommandBuffer ) - sizeof( CommandBuffer ) ) );
-    Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
-
-    PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
-    for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
-    {
-      commandBuffers.push_back( UniqueCommandBuffer( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueCommandBuffer ), "CommandBuffer is greater than UniqueCommandBuffer!" );
-    std::vector<UniqueCommandBuffer, Allocator> commandBuffers;
-    commandBuffers.reserve( allocateInfo.commandBufferCount );
-    CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueCommandBuffer ) - sizeof( CommandBuffer ) ) );
-    Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
-
-    PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
-    for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
-    {
-      commandBuffers.push_back( UniqueCommandBuffer( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
-    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
-    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size(), vectorAllocator );
-    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
-    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
-  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SwapchainKHR swapchain;
-    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
-    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueSwapchainKHR ), "SwapchainKHR is greater than UniqueSwapchainKHR!" );
-    std::vector<UniqueSwapchainKHR, Allocator> swapchainKHRs;
-    swapchainKHRs.reserve( createInfos.size() );
-    SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueSwapchainKHR ) - sizeof( SwapchainKHR ) ) );
-    Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      swapchainKHRs.push_back( UniqueSwapchainKHR( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueSwapchainKHR ), "SwapchainKHR is greater than UniqueSwapchainKHR!" );
-    std::vector<UniqueSwapchainKHR, Allocator> swapchainKHRs;
-    swapchainKHRs.reserve( createInfos.size() );
-    SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueSwapchainKHR ) - sizeof( SwapchainKHR ) ) );
-    Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      swapchainKHRs.push_back( UniqueSwapchainKHR( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
-  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SwapchainKHR swapchain;
-    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SwapchainKHR,Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SwapchainKHR swapchain;
-    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
-    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SwapchainKHR swapchain;
-    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SwapchainKHR,Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
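The createSwapchainKHRUnique overload above returns the created handle wrapped in a UniqueHandle whose ObjectDestroy deleter calls vkDestroySwapchainKHR through the same dispatcher. A minimal calling-side sketch, assuming the default `vk` namespace, exceptions enabled, and a valid `vk::Device device` plus an already filled-in `vk::SwapchainCreateInfoKHR createInfo`:

  // Sketch: with exceptions enabled, ResultValueType<UniqueHandle<...>>::type is the
  // UniqueHandle itself, so a non-success VkResult throws instead of being returned.
  vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( createInfo );
  vk::SwapchainKHR raw = swapchain.get();   // plain handle for further calls
  // vkDestroySwapchainKHR runs automatically when `swapchain` leaves scope.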
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    std::vector<Image,Allocator> swapchainImages;
-    uint32_t swapchainImageCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && swapchainImageCount )
-      {
-        swapchainImages.resize( swapchainImageCount );
-        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
-    swapchainImages.resize( swapchainImageCount );
-    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<Image,Allocator> swapchainImages( vectorAllocator );
-    uint32_t swapchainImageCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && swapchainImageCount )
-      {
-        swapchainImages.resize( swapchainImageCount );
-        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
-    swapchainImages.resize( swapchainImageCount );
-    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
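The enhanced getSwapchainImagesKHR overloads above implement the usual Vulkan two-call enumeration, retrying while the driver reports VK_INCOMPLETE. The same pattern written directly against the C entry point, as a sketch with a hypothetical helper name and minimal error handling (a valid VkDevice and VkSwapchainKHR are assumed):

  #include <vector>
  #include <vulkan/vulkan.h>

  std::vector<VkImage> querySwapchainImages( VkDevice device, VkSwapchainKHR swapchain )
  {
    uint32_t count = 0;
    std::vector<VkImage> images;
    VkResult result;
    do
    {
      result = vkGetSwapchainImagesKHR( device, swapchain, &count, nullptr );   // first call: count only
      if ( ( result == VK_SUCCESS ) && count )
      {
        images.resize( count );
        result = vkGetSwapchainImagesKHR( device, swapchain, &count, images.data() );
      }
    } while ( result == VK_INCOMPLETE );   // count changed between the two calls: retry
    images.resize( count );
    return images;
  }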
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d ) const
-  {
-    uint32_t imageIndex;
-    Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
-    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
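Because acquireNextImageKHR treats eTimeout, eNotReady and eSuboptimalKHR as success codes, the enhanced overload above returns a ResultValue<uint32_t> rather than throwing on those results. A calling-side sketch, assuming the default `vk` namespace, `<cstdint>` included for UINT64_MAX, and existing `device`, `swapchain` and `imageAvailable` semaphore handles:

  vk::ResultValue<uint32_t> acquired =
      device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, vk::Fence{} );
  if ( acquired.result == vk::Result::eSuboptimalKHR )
  {
    // Still presentable this frame, but the swapchain should be recreated soon.
  }
  uint32_t imageIndex = acquired.value;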
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
   {
     HANDLE handle;
     Result result = static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    IndirectCommandsLayoutNVX indirectCommandsLayout;
-    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
-    return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    IndirectCommandsLayoutNVX indirectCommandsLayout;
-    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<IndirectCommandsLayoutNVX,Dispatch>( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    ObjectTableNVX objectTable;
-    Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
-    return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    ObjectTableNVX objectTable;
-    Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<ObjectTableNVX,Dispatch>( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() );
-#else
-    if ( pObjectTableEntries.size() != objectIndices.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    Result result = static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() );
-#else
-    if ( objectEntryTypes.size() != objectIndices.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    Result result = static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const
-  {
-    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const
-  {
-    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const
-  {
-    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const
-  {
-    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
-  {
-    HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
+    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast< VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
   {
-    MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
-    Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
-    return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" );
+    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast< VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
+    return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
-  {
-    int fd;
-    Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( &getFdInfo ), &fd ) );
-    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( pMemoryFdProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
-  {
-    MemoryFdPropertiesKHR memoryFdProperties;
-    Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( &memoryFdProperties ) ) );
-    return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
-  {
-    HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( pImportSemaphoreWin32HandleInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( &importSemaphoreWin32HandleInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
-  {
-    int fd;
-    Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( &getFdInfo ), &fd ) );
-    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( pImportSemaphoreFdInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( &importSemaphoreFdInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
-  {
-    HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
-  {
-    int fd;
-    Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
-    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Fence fence;
-    Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
-    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Fence fence;
-    Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
-    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
-  {
-    uint64_t counterValue;
-    Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
-    return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const
-  {
-    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const
-  {
-    PeerMemoryFeatureFlags peerMemoryFeatures;
-    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
-    return peerMemoryFeatures;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const
-  {
-    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const
-  {
-    PeerMemoryFeatureFlags peerMemoryFeatures;
-    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
-    return peerMemoryFeatures;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
-  {
-    DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
-    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( &deviceGroupPresentCapabilities ) ) );
-    return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const
-  {
-    DeviceGroupPresentModeFlagsKHR modes;
-    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
-    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
-  {
-    uint32_t imageIndex;
-    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
-    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorUpdateTemplate descriptorUpdateTemplate;
-    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
-    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorUpdateTemplate descriptorUpdateTemplate;
-    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<DescriptorUpdateTemplate,Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorUpdateTemplate descriptorUpdateTemplate;
-    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
-    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DescriptorUpdateTemplate descriptorUpdateTemplate;
-    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<DescriptorUpdateTemplate,Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const
-  {
-    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const
-  {
-    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const
-  {
-    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const
-  {
-    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d) const
-  {
-    d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR*>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT*>( pMetadata ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> swapchains, ArrayProxy<const HdrMetadataEXT> metadata, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
-#else
-    if ( swapchains.size() != metadata.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast<const VkSwapchainKHR*>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT*>( metadata.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( pDisplayTimingProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    RefreshCycleDurationGOOGLE displayTimingProperties;
-    Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( &displayTimingProperties ) ) );
-    return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
   {
     std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
     uint32_t presentationTimingCount;
@@ -44217,12 +86697,15 @@
         result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
-    presentationTimings.resize( presentationTimingCount );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+      presentationTimings.resize( presentationTimingCount );
+    }
     return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
   }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
   {
     std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings( vectorAllocator );
     uint32_t presentationTimingCount;
@@ -44236,432 +86719,511 @@
         result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
-    presentationTimings.resize( presentationTimingCount );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+      presentationTimings.resize( presentationTimingCount );
+    }
     return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+    return static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast< VkPerformanceValueINTEL *>( pValue ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
   {
-    MemoryRequirements2 memoryRequirements;
-    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
-    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
+    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
+    Result result = static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast< VkPerformanceValueINTEL *>( &value ) ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    MemoryRequirements2 memoryRequirements;
-    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
-    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
-  {
-    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    MemoryRequirements2 memoryRequirements;
-    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
-    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const
-  {
-    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    MemoryRequirements2 memoryRequirements;
-    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    MemoryRequirements2& memoryRequirements = structureChain.template get<MemoryRequirements2>();
-    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const
-  {
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const
-  {
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SamplerYcbcrConversion ycbcrConversion;
-    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
-    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SamplerYcbcrConversion ycbcrConversion;
-    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SamplerYcbcrConversion,Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SamplerYcbcrConversion ycbcrConversion;
-    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
-    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SamplerYcbcrConversion ycbcrConversion;
-    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SamplerYcbcrConversion,Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d) const
-  {
-    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( pQueueInfo ), reinterpret_cast<VkQueue*>( pQueue ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const
-  {
-    Queue queue;
-    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
-    return queue;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkValidationCacheEXT*>( pValidationCache ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    ValidationCacheEXT validationCache;
-    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
-    return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    ValidationCacheEXT validationCache;
-    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<ValidationCacheEXT,Dispatch>( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const
   {
     std::vector<uint8_t,Allocator> data;
     size_t dataSize;
     Result result;
     do
     {
-      result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
+      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
       if ( ( result == Result::eSuccess ) && dataSize )
       {
         data.resize( dataSize );
-        result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( dataSize <= data.size() );
-    data.resize( dataSize );
-    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
   }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
   {
     std::vector<uint8_t,Allocator> data( vectorAllocator );
     size_t dataSize;
     Result result;
     do
     {
-      result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
+      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
       if ( ( result == Result::eSuccess ) && dataSize )
       {
         data.resize( dataSize );
-        result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( dataSize <= data.size() );
-    data.resize( dataSize );
-    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT*>( pSrcCaches ) ) );
+    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( pInternalRepresentations ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy<const ValidationCacheEXT> srcCaches, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
   {
-    Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size() , reinterpret_cast<const VkValidationCacheEXT*>( srcCaches.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" );
+    std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations;
+    uint32_t internalRepresentationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( internalRepresentations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations( vectorAllocator );
+    uint32_t internalRepresentationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( internalRepresentations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
+    return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( pPipelineInfo ), pExecutableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( pProperties ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const
   {
-    DescriptorSetLayoutSupport support;
-    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return support;
+    std::vector<PipelineExecutablePropertiesKHR,Allocator> properties;
+    uint32_t executableCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+      properties.resize( executableCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
   }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
   {
-    StructureChain<X, Y, Z...> structureChain;
-    DescriptorSetLayoutSupport& support = structureChain.template get<DescriptorSetLayoutSupport>();
-    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return structureChain;
+    std::vector<PipelineExecutablePropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t executableCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+      properties.resize( executableCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
+    return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pStatisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( pStatistics ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
   {
-    DescriptorSetLayoutSupport support;
-    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return support;
+    std::vector<PipelineExecutableStatisticKHR,Allocator> statistics;
+    uint32_t statisticCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( statistics.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+      statistics.resize( statisticCount );
+    }
+    return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
   }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
   {
-    StructureChain<X, Y, Z...> structureChain;
-    DescriptorSetLayoutSupport& support = structureChain.template get<DescriptorSetLayoutSupport>();
-    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return structureChain;
+    std::vector<PipelineExecutableStatisticKHR,Allocator> statistics( vectorAllocator );
+    uint32_t statisticCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( statistics.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+      statistics.resize( statisticCount );
+    }
+    return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    uint64_t data;
+    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), &data );
+    return data;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults(  VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d  ) const
+  {
+    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+  }
+
+  template <typename T, typename Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T,Allocator>> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+    std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+  }
+
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    T data;
+    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, sizeof( T ), reinterpret_cast<void*>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
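+
+  // Editor's note, illustrative only (not part of the generated header): a minimal usage sketch of
+  // the enhanced getQueryPoolResults wrapper above, assuming the default `vk` namespace and
+  // dispatcher, a created vk::Device `device`, and a vk::QueryPool `pool` holding `count` 64-bit
+  // timestamp queries:
+  //
+  //   vk::ResultValue<std::vector<uint64_t>> rv = device.getQueryPoolResults<uint64_t>(
+  //     pool, 0, count, count * sizeof( uint64_t ), sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );
+  //   if ( rv.result == vk::Result::eSuccess )
+  //   {
+  //     // rv.value holds one uint64_t per query
+  //   }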
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d  ) const
+  {
+    Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+  }
+
+  template <typename T, typename Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+    std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+    Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+  }
+
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    T data;
+    Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast<void*>( &data ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR(  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d  ) const
+  {
+    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesKHR" );
+  }
+
+  template <typename T, typename Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+    std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+  }
+
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    T data;
+    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast<void*>( &data ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV(  VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d  ) const
+  {
+    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" );
+  }
+
+  template <typename T, typename Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+    std::vector<T,Allocator> data( dataSize / sizeof( T ) );
+    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+  }
+
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    T data;
+    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast<void*>( &data ) ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast< VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
+    Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast< VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
+    return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast< VkExtent2D *>( pGranularity ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast< VkExtent2D *>( &granularity ) );
+    return granularity;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    uint64_t value;
+    Result result = static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    uint64_t value;
+    Result result = static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
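+
+  // Editor's note, illustrative only (not part of the generated header): with exceptions enabled the
+  // enhanced wrappers above return the counter value directly, e.g. for a hypothetical timeline
+  // vk::Semaphore `timeline` owned by the application:
+  //
+  //   uint64_t completed = device.getSemaphoreCounterValueKHR( timeline );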
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const
   {
     std::vector<uint8_t,Allocator> info;
     size_t infoSize;
@@ -44675,12 +87237,15 @@
         result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void*>( info.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( infoSize <= info.size() );
-    info.resize( infoSize );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( infoSize <= info.size() );
+      info.resize( infoSize );
+    }
     return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
   }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
   {
     std::vector<uint8_t,Allocator> info( vectorAllocator );
     size_t infoSize;
@@ -44694,2946 +87259,1448 @@
         result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void*>( info.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( infoSize <= info.size() );
-    info.resize( infoSize );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( infoSize <= info.size() );
+      info.resize( infoSize );
+    }
     return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
+    return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
+  {
+    uint64_t counterValue;
+    Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
+    return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    std::vector<Image,Allocator> swapchainImages;
+    uint32_t swapchainImageCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Image,Allocator> swapchainImages( vectorAllocator );
+    uint32_t swapchainImageCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
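+
+  // Editor's note, illustrative only (not part of the generated header): the enhanced
+  // getSwapchainImagesKHR wrappers above fold the usual count / allocate / fetch retry loop into a
+  // single call. A minimal sketch, assuming a created vk::Device `device` and vk::SwapchainKHR
+  // `swapchain`, with exceptions enabled:
+  //
+  //   std::vector<vk::Image> images = device.getSwapchainImagesKHR( swapchain );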
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
   template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d ) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> data;
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> data( vectorAllocator );
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
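+
+  // Editor's note, illustrative only (not part of the generated header): the same enumeration
+  // pattern applies to the validation-cache wrapper above; assuming a vk::ValidationCacheEXT `cache`
+  // and exceptions enabled, the serialized blob can be fetched in one call:
+  //
+  //   std::vector<uint8_t> blob = device.getValidationCacheDataEXT( cache );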
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void*>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const & d ) const
+  {
+    void* pData;
+    Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData ) );
+    return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
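+
+  // Editor's note, illustrative only (not part of the generated header): a minimal sketch of the
+  // enhanced mapMemory wrapper above, assuming a host-visible, host-coherent vk::DeviceMemory
+  // allocation `memory` of at least `size` bytes, a source buffer `src`, and exceptions enabled:
+  //
+  //   void * mapped = device.mapMemory( memory, 0, size, vk::MemoryMapFlags() );
+  //   std::memcpy( mapped, src, size );
+  //   device.unmapMemory( memory );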
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFence *>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFence *>( &fence ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFence *>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkFence *>( &fence ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkReleaseProfilingLockKHR( m_device );
+  }
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
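// Illustrative usage sketch (not part of the generated header): the call is spelled
// the same in both modes, but error handling differs; the handles below are
// assumptions for the sketch.
//
//   #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
//     vk::Result r = device.setEvent( event );   // caller must inspect r
//   #else
//     device.setEvent( event );                  // throws vk::SystemError on failure
//   #endif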
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
+    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
 #else
-    if ( timestampInfos.size() != timestamps.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    uint64_t maxDeviation;
-    Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
-    return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" );
+    if ( swapchains.size() != metadata.size() )
+  {
+    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+  }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkSetHdrMetadataEXT( m_device, swapchains.size(), reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
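// Illustrative usage sketch (not part of the generated header): the enhanced overload
// above requires swapchains.size() == metadata.size(), asserting under
// VULKAN_HPP_NO_EXCEPTIONS and throwing vk::LogicError otherwise. A hypothetical call
// with a single swapchain (each argument forming a one-element ArrayProxy) might be:
//
//   vk::HdrMetadataEXT hdr{};
//   hdr.maxLuminance = 1000.0f;                    // field from VK_EXT_hdr_metadata
//   device.setHdrMetadataEXT( swapchain, hdr );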
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( pNameInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( &nameInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( pTagInfo ) ) );
+    d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( &tagInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( pMemoryHostPointerProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
-  {
-    MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
-    Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( &memoryHostPointerProperties ) ) );
-    return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    RenderPass renderPass;
-    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
-    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    RenderPass renderPass;
-    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
-  {
-    AndroidHardwareBufferPropertiesANDROID properties;
-    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<AndroidHardwareBufferPropertiesANDROID>();
-    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
-  {
-    struct AHardwareBuffer* buffer;
-    Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
-    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
 
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+    return static_cast<Result>( d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), data ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" );
+    Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), data ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV* pCreateInfo, const AllocationCallbacks* pAllocator, AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkAccelerationStructureNV*>( pAccelerationStructure ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    AccelerationStructureNV accelerationStructure;
-    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
-    return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNV" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    AccelerationStructureNV accelerationStructure;
-    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
 
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<AccelerationStructureNV,Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNVUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
   {
-    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    Result result = static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
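// Illustrative usage sketch (not part of the generated header): signalSemaphore takes
// a SemaphoreSignalInfo naming a timeline semaphore and the value to signal; the
// semaphore handle and value are assumptions for the sketch.
//
//   vk::SemaphoreSignalInfo signalInfo( timelineSemaphore, /*value=*/42 );
//   device.signalSemaphore( signalInfo );   // core in Vulkan 1.2; signalSemaphoreKHR via VK_KHR_timeline_semaphore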
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV* pInfo, MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const
-  {
-    MemoryRequirements2KHR memoryRequirements;
-    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
-    return memoryRequirements;
+    return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( pBindInfos ) ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( bindInfos.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" );
+    Result result = static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" );
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, uint32_t createInfoCount, const RayTracingPipelineCreateInfoNV* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
-    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
-    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
-  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createRayTracingPipelineNV( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Pipeline pipeline;
-    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
-    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV" );
+    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
-    std::vector<UniquePipeline, Allocator> pipelines;
-    pipelines.reserve( createInfos.size() );
-    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
-    Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
 
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
-    }
 
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" );
-    std::vector<UniquePipeline, Allocator> pipelines;
-    pipelines.reserve( createInfos.size() );
-    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) );
-    Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
-
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    for ( size_t i=0 ; i<createInfos.size() ; i++ )
-    {
-      pipelines.push_back( UniquePipeline( buffer[i], deleter ) );
-    }
-
-    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
+    d.vkUninitializePerformanceApiINTEL( m_device );
   }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createRayTracingPipelineNVUnique( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Pipeline pipeline;
-    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
 
-    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( Image image, ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( Image image, Dispatch const &d ) const
-  {
-    ImageDrmFormatModifierPropertiesEXT properties;
-    Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( &properties ) ) );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageDrmFormatModifierPropertiesEXT" );
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
 
-  template <typename Dispatch> class UniqueHandleTraits<Device,Dispatch> {public: using deleter = ObjectDestroy<NoParent,Dispatch>; };
-  using UniqueDevice = UniqueHandle<Device,DispatchLoaderStatic>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
-  class PhysicalDevice
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-  public:
-    VULKAN_HPP_CONSTEXPR PhysicalDevice()
-      : m_physicalDevice(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t )
-      : m_physicalDevice(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice )
-      : m_physicalDevice( physicalDevice )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    PhysicalDevice & operator=(VkPhysicalDevice physicalDevice)
-    {
-      m_physicalDevice = physicalDevice;
-      return *this; 
-    }
-#endif
-
-    PhysicalDevice & operator=( std::nullptr_t )
-    {
-      m_physicalDevice = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( PhysicalDevice const & rhs ) const
-    {
-      return m_physicalDevice == rhs.m_physicalDevice;
-    }
-
-    bool operator!=(PhysicalDevice const & rhs ) const
-    {
-      return m_physicalDevice != rhs.m_physicalDevice;
-    }
-
-    bool operator<(PhysicalDevice const & rhs ) const
-    {
-      return m_physicalDevice < rhs.m_physicalDevice;
-    }
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceProperties getProperties(Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getProperties(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceFeatures getFeatures(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    FormatProperties getFormatProperties( Format format, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ImageFormatProperties>::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<Device,Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DisplayModeKHR>::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = Dispatch() ) const;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getFeatures2(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceProperties2 getProperties2(Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getProperties2(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    FormatProperties2 getFormatProperties2( Format format, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getFormatProperties2( Format format, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    FormatProperties2 getFormatProperties2KHR( Format format, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    StructureChain<X, Y, Z...> getFormatProperties2KHR( Format format, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = DispatchLoaderStatic> 
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
-#else
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<void>::type releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<Display>::type acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<Rect2D>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<Rect2D>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    MultisamplePropertiesEXT getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename X, typename Y, typename ...Z, typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayProperties2KHR( uint32_t* pPropertyCount, DisplayProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayModeProperties2KHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModeProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( DisplayKHR display, Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR* pDisplayPlaneInfo, DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, TimeDomainEXT* pTimeDomains, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const
-    {
-      return m_physicalDevice;
-    }
-
-    explicit operator bool() const
-    {
-      return m_physicalDevice != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_physicalDevice == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkPhysicalDevice m_physicalDevice;
-  };
-
-  static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const
-  {
-    PhysicalDeviceProperties properties;
-    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
-    return properties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties(Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    PhysicalDeviceProperties& properties = structureChain.template get<PhysicalDeviceProperties>();
-    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
-    return structureChain;
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
+    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties( vectorAllocator );
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const
-  {
-    PhysicalDeviceMemoryProperties memoryProperties;
-    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
-    return memoryProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    PhysicalDeviceFeatures features;
-    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
-    return features;
+    d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE FormatProperties PhysicalDevice::getFormatProperties( Format format, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    FormatProperties formatProperties;
-    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
-    return formatProperties;
+    d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
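
The enhanced-mode overload introduced above replaces the raw pointer/count pairs with ArrayProxy spans. A minimal usage sketch (not part of the generated header), assuming a valid vk::Device `device`, a vk::DescriptorSet `set`, and a filled-in vk::DescriptorBufferInfo `bufferInfo` created elsewhere:

    vk::WriteDescriptorSet write;
    write.dstSet          = set;
    write.dstBinding      = 0;                                   // hypothetical binding index
    write.descriptorCount = 1;
    write.descriptorType  = vk::DescriptorType::eUniformBuffer;
    write.pBufferInfo     = &bufferInfo;
    // A braced single-element list converts to ArrayProxy; nullptr means "no copy descriptors".
    device.updateDescriptorSets( { write }, nullptr );
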
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d ) const
-  {
-    ImageFormatProperties imageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Device device;
-    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
-    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device,Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    Device device;
-    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
-
-    ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
-    return createResultValue<Device,Dispatch>( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter );
+    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
   }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const
   {
-    std::vector<LayerProperties,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
+    Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
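
Because the createResultValue call above lists vk::Result::eTimeout alongside eSuccess, the enhanced-mode waitForFences reports a timeout through its return value instead of throwing. A brief sketch, assuming a valid vk::Device `device` and a vk::Fence `fence` that was submitted elsewhere:

    // ArrayProxy accepts a single fence directly; wait up to ~16 ms for it to signal.
    vk::Result r = device.waitForFences( fence, VK_TRUE, 16000000 );
    if ( r == vk::Result::eTimeout )
    {
      // The GPU has not signaled yet; resources guarded by the fence are still in flight.
    }
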
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d ) const
-  {
-    std::vector<ExtensionProperties,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d ) const
-  {
-    std::vector<SparseImageFormatProperties,Allocator> properties;
-    uint32_t propertyCount;
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
-    properties.resize( propertyCount );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
-    return properties;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    std::vector<SparseImageFormatProperties,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
-    properties.resize( propertyCount );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
-    return properties;
+    return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
   {
-    std::vector<DisplayPropertiesKHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayPropertiesKHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
+    Result result = static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
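
vkWaitSemaphores is the Vulkan 1.2 timeline-semaphore wait, and eTimeout is again treated as a success code here. A rough sketch of the enhanced-mode overload, assuming a valid vk::Device `device` and a timeline vk::Semaphore `timeline` created with vk::SemaphoreTypeCreateInfo elsewhere:

    uint64_t targetValue = 42;                     // hypothetical counter value to wait for
    vk::SemaphoreWaitInfo waitInfo;
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &timeline;
    waitInfo.pValues        = &targetValue;
    // Blocks until the timeline counter reaches targetValue (or the timeout elapses).
    vk::Result r = device.waitSemaphores( waitInfo, UINT64_MAX );
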
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const
-  {
-    std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayPlanePropertiesKHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+    return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
   {
-    std::vector<DisplayKHR,Allocator> displays;
-    uint32_t displayCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && displayCount )
-      {
-        displays.resize( displayCount );
-        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
-    displays.resize( displayCount );
-    return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayKHR,Allocator> displays( vectorAllocator );
-    uint32_t displayCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && displayCount )
-      {
-        displays.resize( displayCount );
-        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
-    displays.resize( displayCount );
-    return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+    Result result = static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+#ifdef VK_ENABLE_BETA_EXTENSIONS
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
+    return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d ) const
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const &data, size_t stride, Dispatch const &d ) const
   {
-    std::vector<DisplayModePropertiesKHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayModePropertiesKHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
+    Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureKHR*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::writeAccelerationStructuresPropertiesKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
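
In this header revision the acceleration-structure query above is still gated behind VK_ENABLE_BETA_EXTENSIONS (the provisional VK_KHR_ray_tracing extension). A rough sketch of the enhanced-mode overload, which copies the query results into a caller-provided span; it assumes a std::vector<vk::AccelerationStructureKHR> `accelerationStructures` built elsewhere and that the compacted-size query type from the provisional extension is available:

    std::vector<vk::DeviceSize> compactedSizes( accelerationStructures.size() );
    // One tightly packed VkDeviceSize per structure, so the stride is the element size.
    device.writeAccelerationStructuresPropertiesKHR(
        accelerationStructures,
        vk::QueryType::eAccelerationStructureCompactedSizeKHR,
        vk::ArrayProxy<vk::DeviceSize>( compactedSizes ),
        sizeof( vk::DeviceSize ) );
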
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DisplayModeKHR mode;
-    Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
-    return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
-  {
-    DisplayPlaneCapabilitiesKHR capabilities;
-    Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
-    return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), pSupported ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d ) const
-  {
-    Bool32 supported;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), &supported ) );
-    return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d ) const
-  {
-    SurfaceCapabilitiesKHR surfaceCapabilities;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
-    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d ) const
-  {
-    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
-    uint32_t surfaceFormatCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
-      {
-        surfaceFormats.resize( surfaceFormatCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
-    surfaceFormats.resize( surfaceFormatCount );
-    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats( vectorAllocator );
-    uint32_t surfaceFormatCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
-      {
-        surfaceFormats.resize( surfaceFormatCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
-    surfaceFormats.resize( surfaceFormatCount );
-    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const
-  {
-    std::vector<PresentModeKHR,Allocator> presentModes;
-    uint32_t presentModeCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && presentModeCount )
-      {
-        presentModes.resize( presentModeCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
-    presentModes.resize( presentModeCount );
-    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
-    uint32_t presentModeCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && presentModeCount )
-      {
-        presentModes.resize( presentModeCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
-    presentModes.resize( presentModeCount );
-    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const
-  {
-    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const
-  {
-    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const
-  {
-    return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const
-  {
-    return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const
-  {
-    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const
-  {
-    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const
-  {
-    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const
-  {
-    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
-  {
-    ExternalImageFormatPropertiesNV externalImageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
-    return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const
-  {
-    DeviceGeneratedCommandsLimitsNVX limits;
-    d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
-    return limits;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const
-  {
-    PhysicalDeviceFeatures2 features;
-    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
-    return features;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2(Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    PhysicalDeviceFeatures2& features = structureChain.template get<PhysicalDeviceFeatures2>();
-    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const
-  {
-    PhysicalDeviceFeatures2 features;
-    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
-    return features;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    PhysicalDeviceFeatures2& features = structureChain.template get<PhysicalDeviceFeatures2>();
-    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const
-  {
-    PhysicalDeviceProperties2 properties;
-    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
-    return properties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2(Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    PhysicalDeviceProperties2& properties = structureChain.template get<PhysicalDeviceProperties2>();
-    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const
-  {
-    PhysicalDeviceProperties2 properties;
-    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
-    return properties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR(Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    PhysicalDeviceProperties2& properties = structureChain.template get<PhysicalDeviceProperties2>();
-    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2( Format format, Dispatch const &d ) const
-  {
-    FormatProperties2 formatProperties;
-    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
-    return formatProperties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( Format format, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    FormatProperties2& formatProperties = structureChain.template get<FormatProperties2>();
-    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2KHR( Format format, Dispatch const &d ) const
-  {
-    FormatProperties2 formatProperties;
-    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
-    return formatProperties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( Format format, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    FormatProperties2& formatProperties = structureChain.template get<FormatProperties2>();
-    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
-  {
-    ImageFormatProperties2 imageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    ImageFormatProperties2& imageFormatProperties = structureChain.template get<ImageFormatProperties2>();
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
-  {
-    ImageFormatProperties2 imageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    ImageFormatProperties2& imageFormatProperties = structureChain.template get<ImageFormatProperties2>();
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const
-  {
-    PhysicalDeviceMemoryProperties2 memoryProperties;
-    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
-    return memoryProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const
-  {
-    PhysicalDeviceMemoryProperties2 memoryProperties;
-    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
-    return memoryProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
-  {
-    std::vector<SparseImageFormatProperties2,Allocator> properties;
-    uint32_t propertyCount;
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
-    properties.resize( propertyCount );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
-    return properties;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
-    properties.resize( propertyCount );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
-    return properties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
-  {
-    std::vector<SparseImageFormatProperties2,Allocator> properties;
-    uint32_t propertyCount;
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
-    properties.resize( propertyCount );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
-    return properties;
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
-    properties.resize( propertyCount );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
-    return properties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const
-  {
-    ExternalBufferProperties externalBufferProperties;
-    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
-    return externalBufferProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const
-  {
-    ExternalBufferProperties externalBufferProperties;
-    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
-    return externalBufferProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const
-  {
-    ExternalSemaphoreProperties externalSemaphoreProperties;
-    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
-    return externalSemaphoreProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const
-  {
-    ExternalSemaphoreProperties externalSemaphoreProperties;
-    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
-    return externalSemaphoreProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const
-  {
-    ExternalFenceProperties externalFenceProperties;
-    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
-    return externalFenceProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const
-  {
-    ExternalFenceProperties externalFenceProperties;
-    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
-    return externalFenceProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d ) const
-  {
-    Display dpy;
-    Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
-    return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const
-  {
-    DisplayKHR display;
-    Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
-    return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d ) const
-  {
-    SurfaceCapabilities2EXT surfaceCapabilities;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
-    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d ) const
-  {
-    std::vector<Rect2D,Allocator> rects;
-    uint32_t rectCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && rectCount )
-      {
-        rects.resize( rectCount );
-        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( rectCount <= rects.size() );
-    rects.resize( rectCount );
-    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<Rect2D,Allocator> rects( vectorAllocator );
-    uint32_t rectCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && rectCount )
-      {
-        rects.resize( rectCount );
-        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( rectCount <= rects.size() );
-    rects.resize( rectCount );
-    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const
-  {
-    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( pMultisampleProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d ) const
-  {
-    MultisamplePropertiesEXT multisampleProperties;
-    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( &multisampleProperties ) );
-    return multisampleProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( pSurfaceCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
-  {
-    SurfaceCapabilities2KHR surfaceCapabilities;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
-    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get<SurfaceCapabilities2KHR>();
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
-  {
-    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats;
-    uint32_t surfaceFormatCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
-      {
-        surfaceFormats.resize( surfaceFormatCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
-    surfaceFormats.resize( surfaceFormatCount );
-    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats( vectorAllocator );
-    uint32_t surfaceFormatCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
-      {
-        surfaceFormats.resize( surfaceFormatCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
-    surfaceFormats.resize( surfaceFormatCount );
-    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, DisplayProperties2KHR* pProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const
-  {
-    std::vector<DisplayProperties2KHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayProperties2KHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const
-  {
-    std::vector<DisplayPlaneProperties2KHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayPlaneProperties2KHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModeProperties2KHR* pProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, Dispatch const &d ) const
-  {
-    std::vector<DisplayModeProperties2KHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayModeProperties2KHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-    properties.resize( propertyCount );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR* pDisplayPlaneInfo, DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( pDisplayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( pCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const
-  {
-    DisplayPlaneCapabilities2KHR capabilities;
-    Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( &capabilities ) ) );
-    return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, TimeDomainEXT* pTimeDomains, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( pTimeDomains ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const
-  {
-    std::vector<TimeDomainEXT,Allocator> timeDomains;
-    uint32_t timeDomainCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && timeDomainCount )
-      {
-        timeDomains.resize( timeDomainCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( timeDomains.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
-    timeDomains.resize( timeDomainCount );
-    return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<TimeDomainEXT,Allocator> timeDomains( vectorAllocator );
-    uint32_t timeDomainCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && timeDomainCount )
-      {
-        timeDomains.resize( timeDomainCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( timeDomains.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
-    timeDomains.resize( timeDomainCount );
-    return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  struct CmdProcessCommandsInfoNVX
-  {
-    CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(),
-                               IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(),
-                               uint32_t indirectCommandsTokenCount_ = 0,
-                               const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr,
-                               uint32_t maxSequencesCount_ = 0,
-                               CommandBuffer targetCommandBuffer_ = CommandBuffer(),
-                               Buffer sequencesCountBuffer_ = Buffer(),
-                               DeviceSize sequencesCountOffset_ = 0,
-                               Buffer sequencesIndexBuffer_ = Buffer(),
-                               DeviceSize sequencesIndexOffset_ = 0 )
-      : objectTable( objectTable_ )
-      , indirectCommandsLayout( indirectCommandsLayout_ )
-      , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
-      , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
-      , maxSequencesCount( maxSequencesCount_ )
-      , targetCommandBuffer( targetCommandBuffer_ )
-      , sequencesCountBuffer( sequencesCountBuffer_ )
-      , sequencesCountOffset( sequencesCountOffset_ )
-      , sequencesIndexBuffer( sequencesIndexBuffer_ )
-      , sequencesIndexOffset( sequencesIndexOffset_ )
-    {
-    }
-
-    CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) );
-    }
-
-    CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) );
-      return *this;
-    }
-    CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
-    {
-      objectTable = objectTable_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
-    {
-      indirectCommandsLayout = indirectCommandsLayout_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
-    {
-      indirectCommandsTokenCount = indirectCommandsTokenCount_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
-    {
-      pIndirectCommandsTokens = pIndirectCommandsTokens_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
-    {
-      maxSequencesCount = maxSequencesCount_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
-    {
-      targetCommandBuffer = targetCommandBuffer_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
-    {
-      sequencesCountBuffer = sequencesCountBuffer_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
-    {
-      sequencesCountOffset = sequencesCountOffset_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
-    {
-      sequencesIndexBuffer = sequencesIndexBuffer_;
-      return *this;
-    }
-
-    CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
-    {
-      sequencesIndexOffset = sequencesIndexOffset_;
-      return *this;
-    }
-
-    operator VkCmdProcessCommandsInfoNVX const&() const
-    {
-      return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
-    }
-
-    operator VkCmdProcessCommandsInfoNVX &()
-    {
-      return *reinterpret_cast<VkCmdProcessCommandsInfoNVX*>(this);
-    }
-
-    bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectTable == rhs.objectTable )
-          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
-          && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
-          && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
-          && ( maxSequencesCount == rhs.maxSequencesCount )
-          && ( targetCommandBuffer == rhs.targetCommandBuffer )
-          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
-          && ( sequencesCountOffset == rhs.sequencesCountOffset )
-          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
-          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
-    }
-
-    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::eCmdProcessCommandsInfoNVX;
-
-  public:
-    const void* pNext = nullptr;
-    ObjectTableNVX objectTable;
-    IndirectCommandsLayoutNVX indirectCommandsLayout;
-    uint32_t indirectCommandsTokenCount;
-    const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
-    uint32_t maxSequencesCount;
-    CommandBuffer targetCommandBuffer;
-    Buffer sequencesCountBuffer;
-    DeviceSize sequencesCountOffset;
-    Buffer sequencesIndexBuffer;
-    DeviceSize sequencesIndexOffset;
-  };
-  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
-
-  struct PhysicalDeviceGroupProperties
-  {
-    operator VkPhysicalDeviceGroupProperties const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>(this);
-    }
-
-    operator VkPhysicalDeviceGroupProperties &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>(this);
-    }
-
-    bool operator==( PhysicalDeviceGroupProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( physicalDeviceCount == rhs.physicalDeviceCount )
-          && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE * sizeof( PhysicalDevice ) ) == 0 )
-          && ( subsetAllocation == rhs.subsetAllocation );
-    }
-
-    bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
-
-  public:
-    void* pNext = nullptr;
-    uint32_t physicalDeviceCount;
-    PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
-    Bool32 subsetAllocation;
-  };
-  static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
-
-  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
-
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  class Instance;
-
-  template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT,Dispatch> {public: using deleter = ObjectDestroy<Instance,Dispatch>; };
-  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT,Dispatch> {public: using deleter = ObjectDestroy<Instance,Dispatch>; };
-  using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT,DispatchLoaderStatic>;
-  template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR,Dispatch> {public: using deleter = ObjectDestroy<Instance,Dispatch>; };
-  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR,DispatchLoaderStatic>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
-  class Instance
-  {
-  public:
-    VULKAN_HPP_CONSTEXPR Instance()
-      : m_instance(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t )
-      : m_instance(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance )
-      : m_instance( instance )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Instance & operator=(VkInstance instance)
-    {
-      m_instance = instance;
-      return *this; 
-    }
-#endif
-
-    Instance & operator=( std::nullptr_t )
-    {
-      m_instance = VK_NULL_HANDLE;
-      return *this;
-    }
-
-    bool operator==( Instance const & rhs ) const
-    {
-      return m_instance == rhs.m_instance;
-    }
-
-    bool operator!=(Instance const & rhs ) const
-    {
-      return m_instance != rhs.m_instance;
-    }
-
-    bool operator<(Instance const & rhs ) const
-    {
-      return m_instance < rhs.m_instance;
-    }
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDebugReportCallbackEXT *>( pCallback ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDebugReportCallbackEXT *>( &callback ) ) );
+    return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDebugReportCallbackEXT *>( &callback ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDebugUtilsMessengerEXT *>( &messenger ) ) );
+    return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDebugUtilsMessengerEXT *>( &messenger ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VK_USE_PLATFORM_VI_NN
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_VI_NN*/
 
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
+  }
 
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = Dispatch() ) const;
-    template <typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = DispatchLoaderStatic> 
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifdef VK_USE_PLATFORM_IOS_MVK
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_IOS_MVK*/
 
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const;
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_MACOS_MVK*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    Result createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    ResultValueType<DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = DispatchLoaderStatic>
-    typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void destroy( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = DispatchLoaderStatic>
-    void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = Dispatch() ) const;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = DispatchLoaderStatic>
-    void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = Dispatch() ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const
-    {
-      return m_instance;
-    }
-
-    explicit operator bool() const
-    {
-      return m_instance != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const
-    {
-      return m_instance == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkInstance m_instance;
-  };
-
-  static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const
+#ifdef VK_USE_PLATFORM_METAL_EXT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
   {
-    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+
+#ifdef VK_USE_PLATFORM_GGP
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
   {
     std::vector<PhysicalDevice,Allocator> physicalDevices;
     uint32_t physicalDeviceCount;
@@ -47647,12 +88714,15 @@
         result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
-    physicalDevices.resize( physicalDeviceCount );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+      physicalDevices.resize( physicalDeviceCount );
+    }
     return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
   }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const
   {
     std::vector<PhysicalDevice,Allocator> physicalDevices( vectorAllocator );
     uint32_t physicalDeviceCount;
@@ -47666,4656 +88736,2947 @@
         result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
       }
     } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
-    physicalDevices.resize( physicalDeviceCount );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+      physicalDevices.resize( physicalDeviceCount );
+    }
     return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetInstanceProcAddr( m_instance, pName );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
 
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter );
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
   }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+  }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
   {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" );
+    Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
 
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDevice *>( pDevice ) ) );
   }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Device device;
+    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDevice *>( &device ) ) );
+    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Device device;
+    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDevice *>( &device ) ) );
+    ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDisplayModeKHR *>( pMode ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDisplayModeKHR *>( &mode ) ) );
+    return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast< VkDisplayModeKHR *>( &mode ) ) );
+    ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d ) const
   {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    std::vector<ExtensionProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
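+
+  // Editorial usage sketch (not part of the registry-generated output): the enhanced overloads
+  // above wrap the count-then-fetch loop, so a caller can simply write, for example,
+  //   std::vector<vk::ExtensionProperties> extensions = physicalDevice.enumerateDeviceExtensionProperties();
+  // where `physicalDevice` is assumed to be a valid vk::PhysicalDevice; on failure the call throws
+  // a vk::SystemError (or reports the Result when VULKAN_HPP_NO_EXCEPTIONS is defined).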
 
   template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const
   {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#ifdef VK_USE_PLATFORM_VI_NN
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast< VkPerformanceCounterKHR *>( pCounters ), reinterpret_cast< VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(  uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Dispatch const &d  ) const
+  {
+    std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions;
+    uint32_t counterCount = counters.size();
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
+      if ( ( result == Result::eSuccess ) && counterCount )
+      {
+        counterDescriptions.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
+      }
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+      counterDescriptions.resize( counterCount );
+    }
+    return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+
+  }
+
+  template <typename Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(  uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Allocator const& vectorAllocator, Dispatch const &d  ) const
+  {
+    std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions( vectorAllocator );
+    uint32_t counterCount = counters.size();
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
+      if ( ( result == Result::eSuccess ) && counterCount )
+      {
+        counterDescriptions.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
+      }
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+      counterDescriptions.resize( counterCount );
+    }
+    return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+
+  }
+
+  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
+  {
+    std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> enumeratedData;
+    uint32_t counterCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
+      if ( ( result == Result::eSuccess ) && counterCount )
+      {
+        enumeratedData.first.resize( counterCount );
+        enumeratedData.second.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( enumeratedData.first.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( enumeratedData.second.data() ) ) );
+        VULKAN_HPP_ASSERT( counterCount <= enumeratedData.first.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( counterCount < enumeratedData.first.size() ) )
+    {
+      enumeratedData.first.resize( counterCount );
+      enumeratedData.second.resize( counterCount );
+    }
+    return createResultValue( result, enumeratedData, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+  }
+
+  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch, typename B1, typename B2, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d ) const
+  {
+    std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> enumeratedData( std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
+    uint32_t counterCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
+      if ( ( result == Result::eSuccess ) && counterCount )
+      {
+        enumeratedData.first.resize( counterCount );
+        enumeratedData.second.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( enumeratedData.first.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( enumeratedData.second.data() ) ) );
+        VULKAN_HPP_ASSERT( counterCount <= enumeratedData.first.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( counterCount < enumeratedData.first.size() ) )
+    {
+      enumeratedData.first.resize( counterCount );
+      enumeratedData.second.resize( counterCount );
+    }
+    return createResultValue( result, enumeratedData, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
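+
+  // Editorial usage sketch (not registry-generated): the pair-returning overload above fetches the
+  // available counters and their descriptions in one call, for example
+  //   auto enumerated = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
+  //   // enumerated.first  : std::vector<vk::PerformanceCounterKHR>
+  //   // enumerated.second : std::vector<vk::PerformanceCounterDescriptionKHR>
+  // assuming `physicalDevice` and `queueFamilyIndex` are valid and exceptions are enabled; with
+  // VULKAN_HPP_NO_EXCEPTIONS the pair is carried inside the returned ResultValue.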
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    std::vector<DisplayModeProperties2KHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayModeProperties2KHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    std::vector<DisplayModePropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayModePropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), reinterpret_cast< VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
+    Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), reinterpret_cast< VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
+    return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast< VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+    Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast< VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
+    return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const
+  {
+    std::vector<DisplayKHR,Allocator> displays;
+    uint32_t displayCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+      displays.resize( displayCount );
+    }
+    return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayKHR,Allocator> displays( vectorAllocator );
+    uint32_t displayCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+      displays.resize( displayCount );
+    }
+    return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( pTimeDomains ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const
+  {
+    std::vector<TimeDomainEXT,Allocator> timeDomains;
+    uint32_t timeDomainCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && timeDomainCount )
+      {
+        timeDomains.resize( timeDomainCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( timeDomains.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+      timeDomains.resize( timeDomainCount );
+    }
+    return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<TimeDomainEXT,Allocator> timeDomains( vectorAllocator );
+    uint32_t timeDomainCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && timeDomainCount )
+      {
+        timeDomains.resize( timeDomainCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( timeDomains.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+      timeDomains.resize( timeDomainCount );
+    }
+    return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const
+  {
+    std::vector<CooperativeMatrixPropertiesNV,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<CooperativeMatrixPropertiesNV,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" );
+    return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_VI_NN*/
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayPlaneProperties2KHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayPlaneProperties2KHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayPlanePropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayProperties2KHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayProperties2KHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayPropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayPropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast< VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast< VkExternalBufferProperties *>( &externalBufferProperties ) );
+    return externalBufferProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast< VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast< VkExternalBufferProperties *>( &externalBufferProperties ) );
+    return externalBufferProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast< VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast< VkExternalFenceProperties *>( &externalFenceProperties ) );
+    return externalFenceProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast< VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast< VkExternalFenceProperties *>( &externalFenceProperties ) );
+    return externalFenceProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast< VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast< VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
+    return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast< VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast< VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+    return externalSemaphoreProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast< VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast< VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+    return externalSemaphoreProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures *>( &features ) );
+    return features;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( &features ) );
+    return features;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( &features ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( &features ) );
+    return features;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( &features ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties *>( &formatProperties ) );
+    return formatProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( &formatProperties ) );
+    return formatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( &formatProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( &formatProperties ) );
+    return formatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( &formatProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR( uint32_t* pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( pFragmentShadingRates ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR,Allocator>>::type PhysicalDevice::getFragmentShadingRatesKHR(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceFragmentShadingRateKHR,Allocator> fragmentShadingRates;
+    uint32_t fragmentShadingRateCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( fragmentShadingRates.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+      fragmentShadingRates.resize( fragmentShadingRateCount );
+    }
+    return createResultValue( result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getFragmentShadingRatesKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR,Allocator>>::type PhysicalDevice::getFragmentShadingRatesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceFragmentShadingRateKHR,Allocator> fragmentShadingRates( vectorAllocator );
+    uint32_t fragmentShadingRateCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( fragmentShadingRates.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+      fragmentShadingRates.resize( fragmentShadingRateCount );
+    }
+    return createResultValue( result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getFragmentShadingRatesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast< VkImageFormatProperties *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast< VkImageFormatProperties *>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    return memoryProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    return memoryProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast< VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast< VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+    return multisampleProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<Rect2D,Allocator> rects;
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Rect2D,Allocator> rects( vectorAllocator );
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties *>( &properties ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( &properties ) );
+    return properties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( &properties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( &properties ) );
+    return properties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( &properties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    uint32_t numPasses;
+    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
+    return numPasses;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast< VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
+  {
+    std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    return queueFamilyProperties;
+  }
+
+  template <typename QueueFamilyPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
+  {
+    std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator , typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator , typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast< VkSparseImageFormatProperties *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d ) const
+  {
+    std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    return properties;
+  }
+
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d ) const
+  {
+    std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast< VkSparseImageFormatProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+  {
+    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    return properties;
+  }
+
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
+  {
+    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast< VkSparseImageFormatProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+  {
+    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    return properties;
+  }
+
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
+  {
+    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( pCombinations ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations;
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( combinations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations( vectorAllocator );
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( combinations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
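+  // The vector-returning wrappers above follow the standard Vulkan two-call enumeration idiom:
+  // query the count with a null data pointer, resize, fetch, and retry while the driver reports
+  // Result::eIncomplete (the count can change between the two calls). A minimal usage sketch,
+  // assuming exceptions are enabled so ResultValueType<T>::type unwraps to T:
+  //   std::vector<vk::FramebufferMixedSamplesCombinationNV> combinations =
+  //     physicalDevice.getSupportedFramebufferMixedSamplesCombinationsNV();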
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast< VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast< VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast< VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
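+  // The StructureChain overload above lets pNext-extension structs be filled in the same call.
+  // A sketch using a chain this header permits (DisplayNativeHdrSurfaceCapabilitiesAMD extends
+  // SurfaceCapabilities2KHR); surface and physicalDevice are assumed to be valid handles:
+  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
+  //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
+  //                                                          vk::DisplayNativeHdrSurfaceCapabilitiesAMD>( surfaceInfo );
+  //   vk::SurfaceCapabilities2KHR caps = chain.get<vk::SurfaceCapabilities2KHR>();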
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats( vectorAllocator );
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats( vectorAllocator );
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
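+  // Typical enhanced-mode usage of the wrapper above, as a sketch (assumes a valid surface and
+  // that exceptions are enabled so the returned ResultValueType unwraps to the plain vector):
+  //   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
+  //   // e.g. prefer vk::Format::eB8G8R8A8Srgb with vk::ColorSpaceKHR::eSrgbNonlinear if present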
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkBool32 *>( pSupported ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Bool32 supported;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkBool32 *>( &supported ) ) );
+    return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( pToolProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator , typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties;
+    uint32_t toolCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( toolProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      toolProperties.resize( toolCount );
+    }
+    return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
+  }
+  template<typename Allocator , typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties( vectorAllocator );
+    uint32_t toolCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( toolProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      toolProperties.resize( toolCount );
+    }
+    return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" );
+    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
 
 #ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+#else
   template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" );
+    return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
 #ifdef VK_USE_PLATFORM_XCB_KHR
   template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
   }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
   {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" );
+    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
 
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DebugReportCallbackEXT callback;
-    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
-    return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DebugReportCallbackEXT callback;
-    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<DebugReportCallbackEXT,Dispatch>( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast< VkDisplayKHR *>( pDisplay ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
   {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast< VkDisplayKHR *>( &display ) ) );
+    return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
   }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast< VkDisplayKHR *>( &display ) ) );
+    ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
+    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() );
 #else
-    if ( layerPrefix.size() != message.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Instance::debugReportMessageEXT: layerPrefix.size() != message.size()" );
-    }
-#endif  // VULKAN_HPP_NO_EXCEPTIONS
-    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
+    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast< VkCheckpointDataNV *>( pCheckpointData ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const
+  template <typename CheckpointDataNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator> Queue::getCheckpointDataNV( Dispatch const & d ) const
   {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
+    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+
+  template <typename CheckpointDataNVAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type >
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator> Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
+  {
+    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
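+  // Note that eSuboptimalKHR is passed as an additional success code above, so the enhanced
+  // presentKHR returns it rather than throwing; a sketch of the usual handling:
+  //   vk::Result presentResult = queue.presentKHR( presentInfo );
+  //   if ( presentResult == vk::Result::eSuboptimalKHR ) { /* recreate the swapchain */ }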
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
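+  // The StructExtends specializations below record which structs may legally appear in the
+  // pNext chain of which base structs; StructureChain uses them to reject invalid chains at
+  // compile time. For example, the DeviceGroupDeviceCreateInfo entry permits:
+  //   vk::StructureChain<vk::DeviceCreateInfo, vk::DeviceGroupDeviceCreateInfo> chain;
+  //   vk::DeviceCreateInfo & createInfo = chain.get<vk::DeviceCreateInfo>();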
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct StructExtends<AndroidHardwareBufferFormatPropertiesANDROID, AndroidHardwareBufferPropertiesANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct StructExtends<AndroidHardwareBufferUsageANDROID, ImageFormatProperties2>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  template <> struct StructExtends<AttachmentDescriptionStencilLayout, AttachmentDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentReferenceStencilLayout, AttachmentReference2>{ enum { value = true }; };
+  template <> struct StructExtends<BindBufferMemoryDeviceGroupInfo, BindBufferMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImageMemoryDeviceGroupInfo, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImageMemorySwapchainInfoKHR, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImagePlaneMemoryInfo, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferDeviceAddressCreateInfoEXT, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferOpaqueCaptureAddressCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceConditionalRenderingInfoEXT, CommandBufferInheritanceInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceRenderPassTransformInfoQCOM, CommandBufferInheritanceInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<D3D12FenceSubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct StructExtends<DebugReportCallbackCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DebugUtilsMessengerCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DedicatedAllocationBufferCreateInfoNV, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DedicatedAllocationImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DedicatedAllocationMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <> struct StructExtends<DeferredOperationInfoKHR, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeferredOperationInfoKHR, AccelerationStructureBuildGeometryInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeferredOperationInfoKHR, CopyAccelerationStructureInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeferredOperationInfoKHR, CopyMemoryToAccelerationStructureInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeferredOperationInfoKHR, CopyAccelerationStructureToMemoryInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  template <> struct StructExtends<DescriptorPoolInlineUniformBlockCreateInfoEXT, DescriptorPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetLayoutBindingFlagsCreateInfo, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetVariableDescriptorCountAllocateInfo, DescriptorSetAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetVariableDescriptorCountLayoutSupport, DescriptorSetLayoutSupport>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceDeviceMemoryReportCreateInfoEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceDiagnosticsConfigCreateInfoNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupBindSparseInfo, BindSparseInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupCommandBufferBeginInfo, CommandBufferBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupDeviceCreateInfo, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupSwapchainCreateInfoKHR, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceMemoryOverallocationCreateInfoAMD, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DevicePrivateDataCreateInfoEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceQueueGlobalPriorityCreateInfoEXT, DeviceQueueCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DisplayNativeHdrSurfaceCapabilitiesAMD, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<DisplayPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DrmFormatModifierPropertiesListEXT, FormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ExportFenceCreateInfo, FenceCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<ExportFenceWin32HandleInfoKHR, FenceCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct StructExtends<ExportMemoryAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<ExportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct StructExtends<ExportSemaphoreCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<ExportSemaphoreWin32HandleInfoKHR, SemaphoreCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct StructExtends<ExternalFormatANDROID, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalFormatANDROID, SamplerYcbcrConversionCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  template <> struct StructExtends<ExternalImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalMemoryBufferCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalMemoryImageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<FilterCubicImageViewImageFormatPropertiesEXT, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<FragmentShadingRateAttachmentInfoKHR, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<FramebufferAttachmentsCreateInfo, FramebufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<GraphicsPipelineShaderGroupsCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageDrmFormatModifierExplicitCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageDrmFormatModifierListCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ImagePlaneMemoryRequirementsInfo, ImageMemoryRequirementsInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageStencilUsageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageStencilUsageCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageSwapchainCreateInfoKHR, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageViewUsageCreateInfo, ImageViewCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct StructExtends<ImportAndroidHardwareBufferInfoANDROID, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  template <> struct StructExtends<ImportMemoryFdInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMemoryHostPointerInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<ImportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryPriorityAllocateInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceConservativeRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthStencilResolveProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDiscardRectanglePropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDriverProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalImageFormatInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryHostPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFeatures2, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFloatControlsProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRatePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceIDProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageDrmFormatModifierInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageViewImageFormatInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance3Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryBudgetPropertiesEXT, PhysicalDeviceMemoryProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePCIBusInfoPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePointClippingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  template <> struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePushDescriptorPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <> struct StructExtends<PhysicalDeviceRayTracingFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <> struct StructExtends<PhysicalDeviceRayTracingPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  template <> struct StructExtends<PhysicalDeviceRayTracingPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRobustness2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSampleLocationsPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerFilterMinmaxProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderCoreProperties2AMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderCorePropertiesAMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImagePropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan11Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan11Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan11Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineColorBlendAdvancedStateCreateInfoEXT, PipelineColorBlendStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCoverageModulationStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCoverageReductionStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCoverageToColorStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoNV>{ enum { value = true }; };
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  template <> struct StructExtends<PipelineDiscardRectangleStateCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineFragmentShadingRateStateCreateInfoKHR, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationConservativeStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationDepthClipStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationLineStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationStateRasterizationOrderAMD, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationStateStreamCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRepresentativeFragmentTestStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineSampleLocationsStateCreateInfoEXT, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineTessellationDomainOriginStateCreateInfo, PipelineTessellationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineVertexInputDivisorStateCreateInfoEXT, PipelineVertexInputStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportCoarseSampleOrderStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportExclusiveScissorStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportShadingRateImageStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportSwizzleStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportWScalingStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_GGP
+  template <> struct StructExtends<PresentFrameTokenGGP, PresentInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_GGP*/
+  template <> struct StructExtends<PresentRegionsKHR, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<PresentTimesInfoGOOGLE, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<ProtectedSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassInputAttachmentAspectCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassMultiviewCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionInfo, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionInfo, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SemaphoreTypeCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SemaphoreTypeCreateInfo, PhysicalDeviceExternalSemaphoreInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, ShaderModuleCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SharedPresentSurfaceCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SubpassDescriptionDepthStencilResolve, SubpassDescription2>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<SurfaceCapabilitiesFullScreenExclusiveEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct StructExtends<SurfaceProtectedCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainCounterCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainDisplayNativeHdrCreateInfoAMD, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<TimelineSemaphoreSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<TimelineSemaphoreSubmitInfo, BindSparseInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ValidationFeaturesEXT, InstanceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>{ enum { value = true }; };
+  template <> struct StructExtends<WriteDescriptorSetInlineUniformBlockEXT, WriteDescriptorSet>{ enum { value = true }; };
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+  class DynamicLoader
+  {
+  public:
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
+#  else
+    DynamicLoader( std::string const & vulkanLibraryName = {} )
+#  endif
     {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      if ( !vulkanLibraryName.empty() )
       {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+#  if defined( __linux__ ) || defined( __APPLE__ )
+        m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
+#  else
+#    error unsupported platform
+#  endif
       }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      else
       {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+#  if defined( __linux__ )
+        m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+        if ( m_library == nullptr )
+        {
+          m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
+        }
+#  elif defined( __APPLE__ )
+        m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( "vulkan-1.dll" );
+#  else
+#    error unsupported platform
+#  endif
       }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const
-  {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+      if ( m_library == nullptr )
       {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+        // NOTE: there should be an InitializationFailedError, but MSVC insists that the symbol does not exist within the scope of this function.
+        throw std::runtime_error( "Failed to load vulkan library!" );
       }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
-  }
-  template <typename Allocator, typename Dispatch> 
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
-      {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( pMessenger ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DebugUtilsMessengerEXT messenger;
-    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
-    return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    DebugUtilsMessengerEXT messenger;
-    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<DebugUtilsMessengerEXT,Dispatch>( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const
-  {
-    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( pCallbackData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const
-  {
-    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( &callbackData ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  struct DeviceGroupDeviceCreateInfo
-  {
-    DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0,
-                                 const PhysicalDevice* pPhysicalDevices_ = nullptr )
-      : physicalDeviceCount( physicalDeviceCount_ )
-      , pPhysicalDevices( pPhysicalDevices_ )
-    {
+#endif
     }
 
-    DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs )
+    DynamicLoader( DynamicLoader const& ) = delete;
+
+    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library(other.m_library)
     {
-      memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) );
+      other.m_library = nullptr;
     }
 
-    DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs )
+    DynamicLoader &operator=( DynamicLoader const& ) = delete;
+
+    DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) );
-      return *this;
-    }
-    DeviceGroupDeviceCreateInfo& setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
+      std::swap(m_library, other.m_library);
       return *this;
     }
 
-    DeviceGroupDeviceCreateInfo& setPhysicalDeviceCount( uint32_t physicalDeviceCount_ )
+    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
     {
-      physicalDeviceCount = physicalDeviceCount_;
-      return *this;
+      if ( m_library )
+      {
+#  if defined( __linux__ ) || defined( __APPLE__ )
+        dlclose( m_library );
+#  elif defined( _WIN32 )
+        ::FreeLibrary( m_library );
+#  else
+#    error unsupported platform
+#  endif
+      }
     }
 
-    DeviceGroupDeviceCreateInfo& setPPhysicalDevices( const PhysicalDevice* pPhysicalDevices_ )
+    template <typename T>
+    T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
     {
-      pPhysicalDevices = pPhysicalDevices_;
-      return *this;
+#  if defined( __linux__ ) || defined( __APPLE__ )
+      return (T)dlsym( m_library, function );
+#  elif defined( _WIN32 )
+      return (T)::GetProcAddress( m_library, function );
+#  else
+#    error unsupported platform
+#  endif
     }
 
-    operator VkDeviceGroupDeviceCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>(this);
-    }
-
-    operator VkDeviceGroupDeviceCreateInfo &()
-    {
-      return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>(this);
-    }
-
-    bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( physicalDeviceCount == rhs.physicalDeviceCount )
-          && ( pPhysicalDevices == rhs.pPhysicalDevices );
-    }
-
-    bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
+    bool success() const VULKAN_HPP_NOEXCEPT { return m_library != nullptr; }
 
   private:
-    StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
-
-  public:
-    const void* pNext = nullptr;
-    uint32_t physicalDeviceCount;
-    const PhysicalDevice* pPhysicalDevices;
+#  if defined( __linux__ ) || defined( __APPLE__ )
+    void * m_library;
+#  elif defined( _WIN32 )
+    ::HINSTANCE m_library;
+#  else
+#    error unsupported platform
+#  endif
   };
-  static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
-
-  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
-
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-
-  template <typename Dispatch> class UniqueHandleTraits<Instance,Dispatch> {public: using deleter = ObjectDestroy<NoParent,Dispatch>; };
-  using UniqueInstance = UniqueHandle<Instance,DispatchLoaderStatic>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
-  template<typename Dispatch = DispatchLoaderStatic>
-  Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d = Dispatch() );
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch = DispatchLoaderStatic>
-  ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() );
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch = DispatchLoaderStatic>
-  typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() );
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d)
-  {
-    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
-  {
-    Instance instance;
-    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
-    return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
-  {
-    Instance instance;
-    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
-
-    ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
-    return createResultValue<Instance,Dispatch>( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  struct BaseOutStructure
-  {
-    BaseOutStructure(  )
-    {
-    }
-
-    BaseOutStructure( VkBaseOutStructure const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BaseOutStructure ) );
-    }
-
-    BaseOutStructure& operator=( VkBaseOutStructure const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BaseOutStructure ) );
-      return *this;
-    }
-    BaseOutStructure& setPNext( struct BaseOutStructure* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    operator VkBaseOutStructure const&() const
-    {
-      return *reinterpret_cast<const VkBaseOutStructure*>(this);
-    }
-
-    operator VkBaseOutStructure &()
-    {
-      return *reinterpret_cast<VkBaseOutStructure*>(this);
-    }
-
-    bool operator==( BaseOutStructure const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext );
-    }
-
-    bool operator!=( BaseOutStructure const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    StructureType sType;
-    struct BaseOutStructure* pNext = nullptr;
-  };
-  static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
-
-  struct BaseInStructure
-  {
-    BaseInStructure(  )
-    {
-    }
-
-    BaseInStructure( VkBaseInStructure const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BaseInStructure ) );
-    }
-
-    BaseInStructure& operator=( VkBaseInStructure const & rhs )
-    {
-      memcpy( this, &rhs, sizeof( BaseInStructure ) );
-      return *this;
-    }
-    BaseInStructure& setPNext( const struct BaseInStructure* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    operator VkBaseInStructure const&() const
-    {
-      return *reinterpret_cast<const VkBaseInStructure*>(this);
-    }
-
-    operator VkBaseInStructure &()
-    {
-      return *reinterpret_cast<VkBaseInStructure*>(this);
-    }
-
-    bool operator==( BaseInStructure const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext );
-    }
-
-    bool operator!=( BaseInStructure const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-    StructureType sType;
-    const struct BaseInStructure* pNext = nullptr;
-  };
-  static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
-
-  template <> struct isStructureChainValid<PresentInfoKHR, DisplayPresentInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageCreateInfo, DedicatedAllocationImageCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<BufferCreateInfo, DedicatedAllocationBufferCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<MemoryAllocateInfo, DedicatedAllocationMemoryAllocateInfoNV>{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoNV>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoNV>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFeatures2>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePushDescriptorPropertiesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PresentInfoKHR, PresentRegionsKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVariablePointerFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVariablePointerFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceIDProperties>{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoKHR>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreWin32HandleInfoKHR>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct isStructureChainValid<SubmitInfo, D3D12FenceSubmitInfoKHR>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceWin32HandleInfoKHR>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewProperties>{ enum { value = true }; };
-  template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassMultiviewCreateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<BindBufferMemoryInfo, BindBufferMemoryDeviceGroupInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemoryDeviceGroupInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<RenderPassBeginInfo, DeviceGroupRenderPassBeginInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<CommandBufferBeginInfo, DeviceGroupCommandBufferBeginInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SubmitInfo, DeviceGroupSubmitInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<BindSparseInfo, DeviceGroupBindSparseInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageCreateInfo, ImageSwapchainCreateInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemorySwapchainInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PresentInfoKHR, PresentTimesInfoGOOGLE>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportWScalingStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDiscardRectanglePropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<MemoryRequirements2, MemoryDedicatedRequirements>{ enum { value = true }; };
-  template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryDedicatedAllocateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SamplerCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageViewCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageFormatProperties2, SamplerYcbcrConversionImageFormatProperties>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageFormatProperties2, TextureLODGatherFormatPropertiesAMD>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SubmitInfo, ProtectedSubmitInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceProtectedMemoryProperties>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageToColorStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSamplerFilterMinmaxPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceBlendOperationAdvancedPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceInlineUniformBlockFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceInlineUniformBlockFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceInlineUniformBlockPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetInlineUniformBlockEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DescriptorPoolCreateInfo, DescriptorPoolInlineUniformBlockCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageCreateInfo, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ShaderModuleCreateInfo, ShaderModuleValidationCacheCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMaintenance3Properties>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderDrawParameterFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderDrawParameterFeatures>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceExternalMemoryHostPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceConservativeRasterizationPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderCorePropertiesAMD>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDescriptorIndexingPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DescriptorSetAllocateInfo, DescriptorSetVariableDescriptorCountAllocateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DescriptorSetLayoutSupport, DescriptorSetVariableDescriptorCountLayoutSupportEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineVertexInputStateCreateInfo, PipelineVertexInputDivisorStateCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVertexAttributeDivisorPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePCIBusInfoPropertiesEXT>{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportAndroidHardwareBufferInfoANDROID>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  template <> struct isStructureChainValid<ImageFormatProperties2, AndroidHardwareBufferUsageANDROID>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-  template <> struct isStructureChainValid<CommandBufferInheritanceInfo, CommandBufferInheritanceConditionalRenderingInfoEXT>{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  template <> struct isStructureChainValid<ImageCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SamplerYcbcrConversionCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice8BitStorageFeaturesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice8BitStorageFeaturesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceConditionalRenderingFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceConditionalRenderingFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkanMemoryModelFeaturesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkanMemoryModelFeaturesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderAtomicInt64FeaturesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderAtomicInt64FeaturesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewASTCDecodeModeEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTransformFeedbackPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationStateStreamCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineRepresentativeFragmentTestStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportExclusiveScissorStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCornerSampledImageFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCornerSampledImageFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceComputeShaderDerivativesFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceComputeShaderDerivativesFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentShaderBarycentricFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentShaderBarycentricFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties, PhysicalDeviceShadingRateImagePropertiesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMeshShaderFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMeshShaderFeaturesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMeshShaderPropertiesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetAccelerationStructureNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceRayTracingPropertiesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceImageDrmFormatModifierInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierListCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierExplicitCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SharedPresentSurfaceCapabilitiesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewUsageCreateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<FormatProperties2, DrmFormatModifierPropertiesListEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassInputAttachmentAspectCreateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<BindImageMemoryInfo, BindImagePlaneMemoryInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageMemoryRequirementsInfo2, ImagePlaneMemoryRequirementsInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<QueueFamilyProperties2, QueueFamilyCheckpointPropertiesNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageMemoryBarrier, SampleLocationsInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassSampleLocationsBeginInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineSampleLocationsStateCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSampleLocationsPropertiesEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<InstanceCreateInfo, DebugReportCallbackCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationStateRasterizationOrderAMD>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfoNV>{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_WIN32_NV
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoNV>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
-  template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFlagsEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSubgroupProperties>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceExternalImageFormatInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<BufferCreateInfo, ExternalMemoryBufferCreateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfo>{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryFdInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryHostPointerInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<ImageFormatProperties2, ExternalImageFormatProperties>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreCreateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceCreateInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainCounterCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryAllocateFlagsInfo>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PresentInfoKHR, DeviceGroupPresentInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, DeviceGroupSwapchainCreateInfoKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportSwizzleStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineDiscardRectangleStateCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePointClippingProperties>{ enum { value = true }; };
-  template <> struct isStructureChainValid<SamplerCreateInfo, SamplerReductionModeCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineTessellationStateCreateInfo, PipelineTessellationDomainOriginStateCreateInfo>{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-  template <> struct isStructureChainValid<AndroidHardwareBufferPropertiesANDROID, AndroidHardwareBufferFormatPropertiesANDROID>{ enum { value = true }; };
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-  template <> struct isStructureChainValid<PipelineColorBlendStateCreateInfo, PipelineColorBlendAdvancedStateCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageModulationStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceQueueCreateInfo, DeviceQueueGlobalPriorityCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<InstanceCreateInfo, DebugUtilsMessengerCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationConservativeStateCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DescriptorSetLayoutCreateInfo, DescriptorSetLayoutBindingFlagsCreateInfoEXT>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDriverPropertiesKHR>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportShadingRateImageStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportCoarseSampleOrderStateCreateInfoNV>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, DeviceMemoryOverallocationCreateInfoAMD>{ enum { value = true }; };
-  template <> struct isStructureChainValid<DeviceCreateInfo, DeviceGroupDeviceCreateInfo>{ enum { value = true }; };
-  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
-  {
-    return "{}";
-  }
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-#ifdef VK_USE_PLATFORM_VI_NN
-  VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagBitsNN)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_VI_NN*/
-
-#ifdef VK_USE_PLATFORM_VI_NN
-  VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagsNN)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_VI_NN*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-  VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagBitsMVK)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-  VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagsMVK)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-  VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagBitsMVK)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-  VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagsMVK)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-  VULKAN_HPP_INLINE std::string to_string(ImagePipeSurfaceCreateFlagBitsFUCHSIA)
-  {
-    return "(void)";
-  }
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-  VULKAN_HPP_INLINE std::string to_string(ImagePipeSurfaceCreateFlagsFUCHSIA)
-  {
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-
-  VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagBits)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlags)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagBitsNV)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagsNV)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagBitsEXT)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagsEXT)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagBitsNV)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagsNV)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagBitsNV)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagsNV)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagBitsEXT)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagsEXT)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagBitsEXT)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagsEXT)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagBitsEXT)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagsEXT)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagBitsEXT)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagsEXT)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateStreamCreateFlagBitsEXT)
-  {
-    return "(void)";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateStreamCreateFlagsEXT)
-  {
-    return "{}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
-  {
-    switch (value)
-    {
-    case ImageLayout::eUndefined: return "Undefined";
-    case ImageLayout::eGeneral: return "General";
-    case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
-    case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
-    case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
-    case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
-    case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
-    case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
-    case ImageLayout::ePreinitialized: return "Preinitialized";
-    case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal";
-    case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal";
-    case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
-    case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR";
-    case ImageLayout::eShadingRateOptimalNV: return "ShadingRateOptimalNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
-  {
-    switch (value)
-    {
-    case AttachmentLoadOp::eLoad: return "Load";
-    case AttachmentLoadOp::eClear: return "Clear";
-    case AttachmentLoadOp::eDontCare: return "DontCare";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
-  {
-    switch (value)
-    {
-    case AttachmentStoreOp::eStore: return "Store";
-    case AttachmentStoreOp::eDontCare: return "DontCare";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageType value)
-  {
-    switch (value)
-    {
-    case ImageType::e1D: return "1D";
-    case ImageType::e2D: return "2D";
-    case ImageType::e3D: return "3D";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
-  {
-    switch (value)
-    {
-    case ImageTiling::eOptimal: return "Optimal";
-    case ImageTiling::eLinear: return "Linear";
-    case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
-  {
-    switch (value)
-    {
-    case ImageViewType::e1D: return "1D";
-    case ImageViewType::e2D: return "2D";
-    case ImageViewType::e3D: return "3D";
-    case ImageViewType::eCube: return "Cube";
-    case ImageViewType::e1DArray: return "1DArray";
-    case ImageViewType::e2DArray: return "2DArray";
-    case ImageViewType::eCubeArray: return "CubeArray";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
-  {
-    switch (value)
-    {
-    case CommandBufferLevel::ePrimary: return "Primary";
-    case CommandBufferLevel::eSecondary: return "Secondary";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
-  {
-    switch (value)
-    {
-    case ComponentSwizzle::eIdentity: return "Identity";
-    case ComponentSwizzle::eZero: return "Zero";
-    case ComponentSwizzle::eOne: return "One";
-    case ComponentSwizzle::eR: return "R";
-    case ComponentSwizzle::eG: return "G";
-    case ComponentSwizzle::eB: return "B";
-    case ComponentSwizzle::eA: return "A";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
-  {
-    switch (value)
-    {
-    case DescriptorType::eSampler: return "Sampler";
-    case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
-    case DescriptorType::eSampledImage: return "SampledImage";
-    case DescriptorType::eStorageImage: return "StorageImage";
-    case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
-    case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
-    case DescriptorType::eUniformBuffer: return "UniformBuffer";
-    case DescriptorType::eStorageBuffer: return "StorageBuffer";
-    case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
-    case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
-    case DescriptorType::eInputAttachment: return "InputAttachment";
-    case DescriptorType::eInlineUniformBlockEXT: return "InlineUniformBlockEXT";
-    case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryType value)
-  {
-    switch (value)
-    {
-    case QueryType::eOcclusion: return "Occlusion";
-    case QueryType::ePipelineStatistics: return "PipelineStatistics";
-    case QueryType::eTimestamp: return "Timestamp";
-    case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT";
-    case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BorderColor value)
-  {
-    switch (value)
-    {
-    case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
-    case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
-    case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
-    case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
-    case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
-    case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
-  {
-    switch (value)
-    {
-    case PipelineBindPoint::eGraphics: return "Graphics";
-    case PipelineBindPoint::eCompute: return "Compute";
-    case PipelineBindPoint::eRayTracingNV: return "RayTracingNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
-  {
-    switch (value)
-    {
-    case PipelineCacheHeaderVersion::eOne: return "One";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
-  {
-    switch (value)
-    {
-    case PrimitiveTopology::ePointList: return "PointList";
-    case PrimitiveTopology::eLineList: return "LineList";
-    case PrimitiveTopology::eLineStrip: return "LineStrip";
-    case PrimitiveTopology::eTriangleList: return "TriangleList";
-    case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
-    case PrimitiveTopology::eTriangleFan: return "TriangleFan";
-    case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
-    case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
-    case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
-    case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
-    case PrimitiveTopology::ePatchList: return "PatchList";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SharingMode value)
-  {
-    switch (value)
-    {
-    case SharingMode::eExclusive: return "Exclusive";
-    case SharingMode::eConcurrent: return "Concurrent";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(IndexType value)
-  {
-    switch (value)
-    {
-    case IndexType::eUint16: return "Uint16";
-    case IndexType::eUint32: return "Uint32";
-    case IndexType::eNoneNV: return "NoneNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(Filter value)
-  {
-    switch (value)
-    {
-    case Filter::eNearest: return "Nearest";
-    case Filter::eLinear: return "Linear";
-    case Filter::eCubicIMG: return "CubicIMG";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
-  {
-    switch (value)
-    {
-    case SamplerMipmapMode::eNearest: return "Nearest";
-    case SamplerMipmapMode::eLinear: return "Linear";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
-  {
-    switch (value)
-    {
-    case SamplerAddressMode::eRepeat: return "Repeat";
-    case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
-    case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
-    case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
-    case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CompareOp value)
-  {
-    switch (value)
-    {
-    case CompareOp::eNever: return "Never";
-    case CompareOp::eLess: return "Less";
-    case CompareOp::eEqual: return "Equal";
-    case CompareOp::eLessOrEqual: return "LessOrEqual";
-    case CompareOp::eGreater: return "Greater";
-    case CompareOp::eNotEqual: return "NotEqual";
-    case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
-    case CompareOp::eAlways: return "Always";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
-  {
-    switch (value)
-    {
-    case PolygonMode::eFill: return "Fill";
-    case PolygonMode::eLine: return "Line";
-    case PolygonMode::ePoint: return "Point";
-    case PolygonMode::eFillRectangleNV: return "FillRectangleNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
-  {
-    switch (value)
-    {
-    case CullModeFlagBits::eNone: return "None";
-    case CullModeFlagBits::eFront: return "Front";
-    case CullModeFlagBits::eBack: return "Back";
-    case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & CullModeFlagBits::eNone) result += "None | ";
-    if (value & CullModeFlagBits::eFront) result += "Front | ";
-    if (value & CullModeFlagBits::eBack) result += "Back | ";
-    if (value & CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FrontFace value)
-  {
-    switch (value)
-    {
-    case FrontFace::eCounterClockwise: return "CounterClockwise";
-    case FrontFace::eClockwise: return "Clockwise";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
-  {
-    switch (value)
-    {
-    case BlendFactor::eZero: return "Zero";
-    case BlendFactor::eOne: return "One";
-    case BlendFactor::eSrcColor: return "SrcColor";
-    case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
-    case BlendFactor::eDstColor: return "DstColor";
-    case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
-    case BlendFactor::eSrcAlpha: return "SrcAlpha";
-    case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
-    case BlendFactor::eDstAlpha: return "DstAlpha";
-    case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
-    case BlendFactor::eConstantColor: return "ConstantColor";
-    case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
-    case BlendFactor::eConstantAlpha: return "ConstantAlpha";
-    case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
-    case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
-    case BlendFactor::eSrc1Color: return "Src1Color";
-    case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
-    case BlendFactor::eSrc1Alpha: return "Src1Alpha";
-    case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BlendOp value)
-  {
-    switch (value)
-    {
-    case BlendOp::eAdd: return "Add";
-    case BlendOp::eSubtract: return "Subtract";
-    case BlendOp::eReverseSubtract: return "ReverseSubtract";
-    case BlendOp::eMin: return "Min";
-    case BlendOp::eMax: return "Max";
-    case BlendOp::eZeroEXT: return "ZeroEXT";
-    case BlendOp::eSrcEXT: return "SrcEXT";
-    case BlendOp::eDstEXT: return "DstEXT";
-    case BlendOp::eSrcOverEXT: return "SrcOverEXT";
-    case BlendOp::eDstOverEXT: return "DstOverEXT";
-    case BlendOp::eSrcInEXT: return "SrcInEXT";
-    case BlendOp::eDstInEXT: return "DstInEXT";
-    case BlendOp::eSrcOutEXT: return "SrcOutEXT";
-    case BlendOp::eDstOutEXT: return "DstOutEXT";
-    case BlendOp::eSrcAtopEXT: return "SrcAtopEXT";
-    case BlendOp::eDstAtopEXT: return "DstAtopEXT";
-    case BlendOp::eXorEXT: return "XorEXT";
-    case BlendOp::eMultiplyEXT: return "MultiplyEXT";
-    case BlendOp::eScreenEXT: return "ScreenEXT";
-    case BlendOp::eOverlayEXT: return "OverlayEXT";
-    case BlendOp::eDarkenEXT: return "DarkenEXT";
-    case BlendOp::eLightenEXT: return "LightenEXT";
-    case BlendOp::eColordodgeEXT: return "ColordodgeEXT";
-    case BlendOp::eColorburnEXT: return "ColorburnEXT";
-    case BlendOp::eHardlightEXT: return "HardlightEXT";
-    case BlendOp::eSoftlightEXT: return "SoftlightEXT";
-    case BlendOp::eDifferenceEXT: return "DifferenceEXT";
-    case BlendOp::eExclusionEXT: return "ExclusionEXT";
-    case BlendOp::eInvertEXT: return "InvertEXT";
-    case BlendOp::eInvertRgbEXT: return "InvertRgbEXT";
-    case BlendOp::eLineardodgeEXT: return "LineardodgeEXT";
-    case BlendOp::eLinearburnEXT: return "LinearburnEXT";
-    case BlendOp::eVividlightEXT: return "VividlightEXT";
-    case BlendOp::eLinearlightEXT: return "LinearlightEXT";
-    case BlendOp::ePinlightEXT: return "PinlightEXT";
-    case BlendOp::eHardmixEXT: return "HardmixEXT";
-    case BlendOp::eHslHueEXT: return "HslHueEXT";
-    case BlendOp::eHslSaturationEXT: return "HslSaturationEXT";
-    case BlendOp::eHslColorEXT: return "HslColorEXT";
-    case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT";
-    case BlendOp::ePlusEXT: return "PlusEXT";
-    case BlendOp::ePlusClampedEXT: return "PlusClampedEXT";
-    case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT";
-    case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT";
-    case BlendOp::eMinusEXT: return "MinusEXT";
-    case BlendOp::eMinusClampedEXT: return "MinusClampedEXT";
-    case BlendOp::eContrastEXT: return "ContrastEXT";
-    case BlendOp::eInvertOvgEXT: return "InvertOvgEXT";
-    case BlendOp::eRedEXT: return "RedEXT";
-    case BlendOp::eGreenEXT: return "GreenEXT";
-    case BlendOp::eBlueEXT: return "BlueEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(StencilOp value)
-  {
-    switch (value)
-    {
-    case StencilOp::eKeep: return "Keep";
-    case StencilOp::eZero: return "Zero";
-    case StencilOp::eReplace: return "Replace";
-    case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
-    case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
-    case StencilOp::eInvert: return "Invert";
-    case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
-    case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(LogicOp value)
-  {
-    switch (value)
-    {
-    case LogicOp::eClear: return "Clear";
-    case LogicOp::eAnd: return "And";
-    case LogicOp::eAndReverse: return "AndReverse";
-    case LogicOp::eCopy: return "Copy";
-    case LogicOp::eAndInverted: return "AndInverted";
-    case LogicOp::eNoOp: return "NoOp";
-    case LogicOp::eXor: return "Xor";
-    case LogicOp::eOr: return "Or";
-    case LogicOp::eNor: return "Nor";
-    case LogicOp::eEquivalent: return "Equivalent";
-    case LogicOp::eInvert: return "Invert";
-    case LogicOp::eOrReverse: return "OrReverse";
-    case LogicOp::eCopyInverted: return "CopyInverted";
-    case LogicOp::eOrInverted: return "OrInverted";
-    case LogicOp::eNand: return "Nand";
-    case LogicOp::eSet: return "Set";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
-  {
-    switch (value)
-    {
-    case InternalAllocationType::eExecutable: return "Executable";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
-  {
-    switch (value)
-    {
-    case SystemAllocationScope::eCommand: return "Command";
-    case SystemAllocationScope::eObject: return "Object";
-    case SystemAllocationScope::eCache: return "Cache";
-    case SystemAllocationScope::eDevice: return "Device";
-    case SystemAllocationScope::eInstance: return "Instance";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
-  {
-    switch (value)
-    {
-    case PhysicalDeviceType::eOther: return "Other";
-    case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
-    case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
-    case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
-    case PhysicalDeviceType::eCpu: return "Cpu";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
-  {
-    switch (value)
-    {
-    case VertexInputRate::eVertex: return "Vertex";
-    case VertexInputRate::eInstance: return "Instance";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(Format value)
-  {
-    switch (value)
-    {
-    case Format::eUndefined: return "Undefined";
-    case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
-    case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
-    case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
-    case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
-    case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
-    case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
-    case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
-    case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
-    case Format::eR8Unorm: return "R8Unorm";
-    case Format::eR8Snorm: return "R8Snorm";
-    case Format::eR8Uscaled: return "R8Uscaled";
-    case Format::eR8Sscaled: return "R8Sscaled";
-    case Format::eR8Uint: return "R8Uint";
-    case Format::eR8Sint: return "R8Sint";
-    case Format::eR8Srgb: return "R8Srgb";
-    case Format::eR8G8Unorm: return "R8G8Unorm";
-    case Format::eR8G8Snorm: return "R8G8Snorm";
-    case Format::eR8G8Uscaled: return "R8G8Uscaled";
-    case Format::eR8G8Sscaled: return "R8G8Sscaled";
-    case Format::eR8G8Uint: return "R8G8Uint";
-    case Format::eR8G8Sint: return "R8G8Sint";
-    case Format::eR8G8Srgb: return "R8G8Srgb";
-    case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
-    case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
-    case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
-    case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
-    case Format::eR8G8B8Uint: return "R8G8B8Uint";
-    case Format::eR8G8B8Sint: return "R8G8B8Sint";
-    case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
-    case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
-    case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
-    case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
-    case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
-    case Format::eB8G8R8Uint: return "B8G8R8Uint";
-    case Format::eB8G8R8Sint: return "B8G8R8Sint";
-    case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
-    case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
-    case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
-    case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
-    case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
-    case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
-    case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
-    case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
-    case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
-    case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
-    case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
-    case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
-    case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
-    case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
-    case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
-    case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
-    case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
-    case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
-    case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
-    case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
-    case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
-    case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
-    case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
-    case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
-    case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
-    case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
-    case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
-    case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
-    case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
-    case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
-    case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
-    case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
-    case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
-    case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
-    case Format::eR16Unorm: return "R16Unorm";
-    case Format::eR16Snorm: return "R16Snorm";
-    case Format::eR16Uscaled: return "R16Uscaled";
-    case Format::eR16Sscaled: return "R16Sscaled";
-    case Format::eR16Uint: return "R16Uint";
-    case Format::eR16Sint: return "R16Sint";
-    case Format::eR16Sfloat: return "R16Sfloat";
-    case Format::eR16G16Unorm: return "R16G16Unorm";
-    case Format::eR16G16Snorm: return "R16G16Snorm";
-    case Format::eR16G16Uscaled: return "R16G16Uscaled";
-    case Format::eR16G16Sscaled: return "R16G16Sscaled";
-    case Format::eR16G16Uint: return "R16G16Uint";
-    case Format::eR16G16Sint: return "R16G16Sint";
-    case Format::eR16G16Sfloat: return "R16G16Sfloat";
-    case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
-    case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
-    case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
-    case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
-    case Format::eR16G16B16Uint: return "R16G16B16Uint";
-    case Format::eR16G16B16Sint: return "R16G16B16Sint";
-    case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
-    case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
-    case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
-    case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
-    case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
-    case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
-    case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
-    case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
-    case Format::eR32Uint: return "R32Uint";
-    case Format::eR32Sint: return "R32Sint";
-    case Format::eR32Sfloat: return "R32Sfloat";
-    case Format::eR32G32Uint: return "R32G32Uint";
-    case Format::eR32G32Sint: return "R32G32Sint";
-    case Format::eR32G32Sfloat: return "R32G32Sfloat";
-    case Format::eR32G32B32Uint: return "R32G32B32Uint";
-    case Format::eR32G32B32Sint: return "R32G32B32Sint";
-    case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
-    case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
-    case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
-    case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
-    case Format::eR64Uint: return "R64Uint";
-    case Format::eR64Sint: return "R64Sint";
-    case Format::eR64Sfloat: return "R64Sfloat";
-    case Format::eR64G64Uint: return "R64G64Uint";
-    case Format::eR64G64Sint: return "R64G64Sint";
-    case Format::eR64G64Sfloat: return "R64G64Sfloat";
-    case Format::eR64G64B64Uint: return "R64G64B64Uint";
-    case Format::eR64G64B64Sint: return "R64G64B64Sint";
-    case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
-    case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
-    case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
-    case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
-    case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
-    case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
-    case Format::eD16Unorm: return "D16Unorm";
-    case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
-    case Format::eD32Sfloat: return "D32Sfloat";
-    case Format::eS8Uint: return "S8Uint";
-    case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
-    case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
-    case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
-    case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
-    case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
-    case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
-    case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
-    case Format::eBc2UnormBlock: return "Bc2UnormBlock";
-    case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
-    case Format::eBc3UnormBlock: return "Bc3UnormBlock";
-    case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
-    case Format::eBc4UnormBlock: return "Bc4UnormBlock";
-    case Format::eBc4SnormBlock: return "Bc4SnormBlock";
-    case Format::eBc5UnormBlock: return "Bc5UnormBlock";
-    case Format::eBc5SnormBlock: return "Bc5SnormBlock";
-    case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
-    case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
-    case Format::eBc7UnormBlock: return "Bc7UnormBlock";
-    case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
-    case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
-    case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
-    case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
-    case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
-    case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
-    case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
-    case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
-    case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
-    case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
-    case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
-    case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
-    case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
-    case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
-    case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
-    case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
-    case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
-    case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
-    case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
-    case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
-    case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
-    case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
-    case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
-    case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
-    case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
-    case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
-    case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
-    case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
-    case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
-    case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
-    case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
-    case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
-    case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
-    case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
-    case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
-    case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
-    case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
-    case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
-    case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
-    case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm";
-    case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm";
-    case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm";
-    case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm";
-    case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm";
-    case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm";
-    case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm";
-    case Format::eR10X6UnormPack16: return "R10X6UnormPack16";
-    case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16";
-    case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16";
-    case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
-    case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
-    case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16";
-    case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16";
-    case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16";
-    case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16";
-    case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16";
-    case Format::eR12X4UnormPack16: return "R12X4UnormPack16";
-    case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16";
-    case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16";
-    case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
-    case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
-    case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16";
-    case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16";
-    case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16";
-    case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16";
-    case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16";
-    case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm";
-    case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm";
-    case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm";
-    case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm";
-    case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm";
-    case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm";
-    case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm";
-    case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
-    case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
-    case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
-    case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
-    case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
-    case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
-    case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
-    case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(StructureType value)
-  {
-    switch (value)
-    {
-    case StructureType::eApplicationInfo: return "ApplicationInfo";
-    case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
-    case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
-    case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
-    case StructureType::eSubmitInfo: return "SubmitInfo";
-    case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
-    case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
-    case StructureType::eBindSparseInfo: return "BindSparseInfo";
-    case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
-    case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
-    case StructureType::eEventCreateInfo: return "EventCreateInfo";
-    case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
-    case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
-    case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
-    case StructureType::eImageCreateInfo: return "ImageCreateInfo";
-    case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
-    case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
-    case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
-    case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
-    case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
-    case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
-    case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
-    case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
-    case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
-    case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
-    case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
-    case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
-    case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
-    case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
-    case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
-    case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
-    case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
-    case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
-    case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
-    case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
-    case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
-    case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
-    case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
-    case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
-    case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
-    case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
-    case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
-    case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
-    case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
-    case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
-    case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
-    case StructureType::eMemoryBarrier: return "MemoryBarrier";
-    case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
-    case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
-    case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties";
-    case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo";
-    case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo";
-    case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures";
-    case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements";
-    case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo";
-    case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo";
-    case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo";
-    case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo";
-    case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo";
-    case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo";
-    case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo";
-    case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo";
-    case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties";
-    case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo";
-    case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2";
-    case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2";
-    case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2";
-    case StructureType::eMemoryRequirements2: return "MemoryRequirements2";
-    case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2";
-    case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2";
-    case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2";
-    case StructureType::eFormatProperties2: return "FormatProperties2";
-    case StructureType::eImageFormatProperties2: return "ImageFormatProperties2";
-    case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2";
-    case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2";
-    case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2";
-    case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2";
-    case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2";
-    case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties";
-    case StructureType::eRenderPassInputAttachmentAspectCreateInfo: return "RenderPassInputAttachmentAspectCreateInfo";
-    case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo";
-    case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: return "PipelineTessellationDomainOriginStateCreateInfo";
-    case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo";
-    case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures";
-    case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties";
-    case StructureType::ePhysicalDeviceVariablePointerFeatures: return "PhysicalDeviceVariablePointerFeatures";
-    case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo";
-    case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures";
-    case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties";
-    case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2";
-    case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo";
-    case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo";
-    case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo";
-    case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo";
-    case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: return "PhysicalDeviceSamplerYcbcrConversionFeatures";
-    case StructureType::eSamplerYcbcrConversionImageFormatProperties: return "SamplerYcbcrConversionImageFormatProperties";
-    case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo";
-    case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo";
-    case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties";
-    case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo";
-    case StructureType::eExternalBufferProperties: return "ExternalBufferProperties";
-    case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties";
-    case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo";
-    case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo";
-    case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo";
-    case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo";
-    case StructureType::eExternalFenceProperties: return "ExternalFenceProperties";
-    case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo";
-    case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo";
-    case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo";
-    case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties";
-    case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties";
-    case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport";
-    case StructureType::ePhysicalDeviceShaderDrawParameterFeatures: return "PhysicalDeviceShaderDrawParameterFeatures";
-    case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
-    case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
-    case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR";
-    case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR";
-    case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR";
-    case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR";
-    case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR";
-    case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR";
-    case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
-    case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
-    case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
-    case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
-    case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
-    case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
-    case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
-    case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
-    case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
-    case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD";
-    case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
-    case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
-    case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
-    case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
-    case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
-    case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
-    case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: return "PhysicalDeviceTransformFeedbackFeaturesEXT";
-    case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: return "PhysicalDeviceTransformFeedbackPropertiesEXT";
-    case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: return "PipelineRasterizationStateStreamCreateInfoEXT";
-    case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD";
-    case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: return "PhysicalDeviceCornerSampledImageFeaturesNV";
-    case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
-    case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
-    case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
-    case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
-    case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
-    case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
-    case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
-    case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT";
-    case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT";
-    case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR";
-    case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR";
-    case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR";
-    case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR";
-    case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR";
-    case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR";
-    case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR";
-    case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR";
-    case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR";
-    case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR";
-    case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR";
-    case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR";
-    case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR";
-    case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR";
-    case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR";
-    case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: return "CommandBufferInheritanceConditionalRenderingInfoEXT";
-    case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: return "PhysicalDeviceConditionalRenderingFeaturesEXT";
-    case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT";
-    case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR";
-    case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
-    case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
-    case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
-    case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
-    case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
-    case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
-    case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV";
-    case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
-    case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
-    case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
-    case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
-    case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
-    case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE";
-    case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
-    case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV";
-    case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT";
-    case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT";
-    case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
-    case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: return "PipelineRasterizationConservativeStateCreateInfoEXT";
-    case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT";
-    case StructureType::eAttachmentDescription2KHR: return "AttachmentDescription2KHR";
-    case StructureType::eAttachmentReference2KHR: return "AttachmentReference2KHR";
-    case StructureType::eSubpassDescription2KHR: return "SubpassDescription2KHR";
-    case StructureType::eSubpassDependency2KHR: return "SubpassDependency2KHR";
-    case StructureType::eRenderPassCreateInfo2KHR: return "RenderPassCreateInfo2KHR";
-    case StructureType::eSubpassBeginInfoKHR: return "SubpassBeginInfoKHR";
-    case StructureType::eSubpassEndInfoKHR: return "SubpassEndInfoKHR";
-    case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR";
-    case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR";
-    case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR";
-    case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR";
-    case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR";
-    case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR";
-    case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR";
-    case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR";
-    case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR";
-    case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR";
-    case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR";
-    case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR";
-    case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR";
-    case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR";
-    case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK";
-    case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK";
-    case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT";
-    case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT";
-    case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT";
-    case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT";
-    case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT";
-    case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID";
-    case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID";
-    case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: return "AndroidHardwareBufferFormatPropertiesANDROID";
-    case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID";
-    case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: return "MemoryGetAndroidHardwareBufferInfoANDROID";
-    case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
-    case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT: return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT";
-    case StructureType::eSamplerReductionModeCreateInfoEXT: return "SamplerReductionModeCreateInfoEXT";
-    case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT: return "PhysicalDeviceInlineUniformBlockFeaturesEXT";
-    case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT: return "PhysicalDeviceInlineUniformBlockPropertiesEXT";
-    case StructureType::eWriteDescriptorSetInlineUniformBlockEXT: return "WriteDescriptorSetInlineUniformBlockEXT";
-    case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT: return "DescriptorPoolInlineUniformBlockCreateInfoEXT";
-    case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
-    case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
-    case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT";
-    case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: return "PhysicalDeviceSampleLocationsPropertiesEXT";
-    case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT";
-    case StructureType::eImageFormatListCreateInfoKHR: return "ImageFormatListCreateInfoKHR";
-    case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
-    case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
-    case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT";
-    case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV";
-    case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV";
-    case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT";
-    case StructureType::eDrmFormatModifierPropertiesEXT: return "DrmFormatModifierPropertiesEXT";
-    case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
-    case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT";
-    case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: return "ImageDrmFormatModifierExplicitCreateInfoEXT";
-    case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT";
-    case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT";
-    case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT";
-    case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT: return "DescriptorSetLayoutBindingFlagsCreateInfoEXT";
-    case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT: return "PhysicalDeviceDescriptorIndexingFeaturesEXT";
-    case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT: return "PhysicalDeviceDescriptorIndexingPropertiesEXT";
-    case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT: return "DescriptorSetVariableDescriptorCountAllocateInfoEXT";
-    case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT: return "DescriptorSetVariableDescriptorCountLayoutSupportEXT";
-    case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: return "PipelineViewportShadingRateImageStateCreateInfoNV";
-    case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV";
-    case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: return "PhysicalDeviceShadingRateImagePropertiesNV";
-    case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
-    case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV";
-    case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV";
-    case StructureType::eGeometryNV: return "GeometryNV";
-    case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV";
-    case StructureType::eGeometryAabbNV: return "GeometryAabbNV";
-    case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV";
-    case StructureType::eWriteDescriptorSetAccelerationStructureNV: return "WriteDescriptorSetAccelerationStructureNV";
-    case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: return "AccelerationStructureMemoryRequirementsInfoNV";
-    case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV";
-    case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV";
-    case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV";
-    case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
-    case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
-    case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT";
-    case StructureType::ePhysicalDevice8BitStorageFeaturesKHR: return "PhysicalDevice8BitStorageFeaturesKHR";
-    case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT";
-    case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT";
-    case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
-    case StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR: return "PhysicalDeviceShaderAtomicInt64FeaturesKHR";
-    case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT";
-    case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD";
-    case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD";
-    case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
-    case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: return "PipelineVertexInputDivisorStateCreateInfoEXT";
-    case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT: return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
-    case StructureType::ePhysicalDeviceDriverPropertiesKHR: return "PhysicalDeviceDriverPropertiesKHR";
-    case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
-    case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV";
-    case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV";
-    case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV: return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV";
-    case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: return "PhysicalDeviceShaderImageFootprintFeaturesNV";
-    case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: return "PipelineViewportExclusiveScissorStateCreateInfoNV";
-    case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV";
-    case StructureType::eCheckpointDataNV: return "CheckpointDataNV";
-    case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV";
-    case StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR: return "PhysicalDeviceVulkanMemoryModelFeaturesKHR";
-    case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT";
-    case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
-  {
-    switch (value)
-    {
-    case SubpassContents::eInline: return "Inline";
-    case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DynamicState value)
-  {
-    switch (value)
-    {
-    case DynamicState::eViewport: return "Viewport";
-    case DynamicState::eScissor: return "Scissor";
-    case DynamicState::eLineWidth: return "LineWidth";
-    case DynamicState::eDepthBias: return "DepthBias";
-    case DynamicState::eBlendConstants: return "BlendConstants";
-    case DynamicState::eDepthBounds: return "DepthBounds";
-    case DynamicState::eStencilCompareMask: return "StencilCompareMask";
-    case DynamicState::eStencilWriteMask: return "StencilWriteMask";
-    case DynamicState::eStencilReference: return "StencilReference";
-    case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
-    case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
-    case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT";
-    case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV";
-    case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV";
-    case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateType value)
-  {
-    switch (value)
-    {
-    case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet";
-    case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ObjectType value)
-  {
-    switch (value)
-    {
-    case ObjectType::eUnknown: return "Unknown";
-    case ObjectType::eInstance: return "Instance";
-    case ObjectType::ePhysicalDevice: return "PhysicalDevice";
-    case ObjectType::eDevice: return "Device";
-    case ObjectType::eQueue: return "Queue";
-    case ObjectType::eSemaphore: return "Semaphore";
-    case ObjectType::eCommandBuffer: return "CommandBuffer";
-    case ObjectType::eFence: return "Fence";
-    case ObjectType::eDeviceMemory: return "DeviceMemory";
-    case ObjectType::eBuffer: return "Buffer";
-    case ObjectType::eImage: return "Image";
-    case ObjectType::eEvent: return "Event";
-    case ObjectType::eQueryPool: return "QueryPool";
-    case ObjectType::eBufferView: return "BufferView";
-    case ObjectType::eImageView: return "ImageView";
-    case ObjectType::eShaderModule: return "ShaderModule";
-    case ObjectType::ePipelineCache: return "PipelineCache";
-    case ObjectType::ePipelineLayout: return "PipelineLayout";
-    case ObjectType::eRenderPass: return "RenderPass";
-    case ObjectType::ePipeline: return "Pipeline";
-    case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout";
-    case ObjectType::eSampler: return "Sampler";
-    case ObjectType::eDescriptorPool: return "DescriptorPool";
-    case ObjectType::eDescriptorSet: return "DescriptorSet";
-    case ObjectType::eFramebuffer: return "Framebuffer";
-    case ObjectType::eCommandPool: return "CommandPool";
-    case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
-    case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
-    case ObjectType::eSurfaceKHR: return "SurfaceKHR";
-    case ObjectType::eSwapchainKHR: return "SwapchainKHR";
-    case ObjectType::eDisplayKHR: return "DisplayKHR";
-    case ObjectType::eDisplayModeKHR: return "DisplayModeKHR";
-    case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
-    case ObjectType::eObjectTableNVX: return "ObjectTableNVX";
-    case ObjectType::eIndirectCommandsLayoutNVX: return "IndirectCommandsLayoutNVX";
-    case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT";
-    case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT";
-    case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
-  {
-    switch (value)
-    {
-    case QueueFlagBits::eGraphics: return "Graphics";
-    case QueueFlagBits::eCompute: return "Compute";
-    case QueueFlagBits::eTransfer: return "Transfer";
-    case QueueFlagBits::eSparseBinding: return "SparseBinding";
-    case QueueFlagBits::eProtected: return "Protected";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & QueueFlagBits::eGraphics) result += "Graphics | ";
-    if (value & QueueFlagBits::eCompute) result += "Compute | ";
-    if (value & QueueFlagBits::eTransfer) result += "Transfer | ";
-    if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | ";
-    if (value & QueueFlagBits::eProtected) result += "Protected | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case DeviceQueueCreateFlagBits::eProtected: return "Protected";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DeviceQueueCreateFlagBits::eProtected) result += "Protected | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
-  {
-    switch (value)
-    {
-    case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
-    case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
-    case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
-    case MemoryPropertyFlagBits::eHostCached: return "HostCached";
-    case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
-    case MemoryPropertyFlagBits::eProtected: return "Protected";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | ";
-    if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | ";
-    if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | ";
-    if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | ";
-    if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | ";
-    if (value & MemoryPropertyFlagBits::eProtected) result += "Protected | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
-  {
-    switch (value)
-    {
-    case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
-    case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | ";
-    if (value & MemoryHeapFlagBits::eMultiInstance) result += "MultiInstance | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
-  {
-    switch (value)
-    {
-    case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
-    case AccessFlagBits::eIndexRead: return "IndexRead";
-    case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
-    case AccessFlagBits::eUniformRead: return "UniformRead";
-    case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
-    case AccessFlagBits::eShaderRead: return "ShaderRead";
-    case AccessFlagBits::eShaderWrite: return "ShaderWrite";
-    case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
-    case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
-    case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
-    case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
-    case AccessFlagBits::eTransferRead: return "TransferRead";
-    case AccessFlagBits::eTransferWrite: return "TransferWrite";
-    case AccessFlagBits::eHostRead: return "HostRead";
-    case AccessFlagBits::eHostWrite: return "HostWrite";
-    case AccessFlagBits::eMemoryRead: return "MemoryRead";
-    case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
-    case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
-    case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
-    case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
-    case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
-    case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
-    case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
-    case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
-    case AccessFlagBits::eShadingRateImageReadNV: return "ShadingRateImageReadNV";
-    case AccessFlagBits::eAccelerationStructureReadNV: return "AccelerationStructureReadNV";
-    case AccessFlagBits::eAccelerationStructureWriteNV: return "AccelerationStructureWriteNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | ";
-    if (value & AccessFlagBits::eIndexRead) result += "IndexRead | ";
-    if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | ";
-    if (value & AccessFlagBits::eUniformRead) result += "UniformRead | ";
-    if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | ";
-    if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | ";
-    if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | ";
-    if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | ";
-    if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | ";
-    if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | ";
-    if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | ";
-    if (value & AccessFlagBits::eTransferRead) result += "TransferRead | ";
-    if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | ";
-    if (value & AccessFlagBits::eHostRead) result += "HostRead | ";
-    if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
-    if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
-    if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
-    if (value & AccessFlagBits::eTransformFeedbackWriteEXT) result += "TransformFeedbackWriteEXT | ";
-    if (value & AccessFlagBits::eTransformFeedbackCounterReadEXT) result += "TransformFeedbackCounterReadEXT | ";
-    if (value & AccessFlagBits::eTransformFeedbackCounterWriteEXT) result += "TransformFeedbackCounterWriteEXT | ";
-    if (value & AccessFlagBits::eConditionalRenderingReadEXT) result += "ConditionalRenderingReadEXT | ";
-    if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
-    if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
-    if (value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT) result += "ColorAttachmentReadNoncoherentEXT | ";
-    if (value & AccessFlagBits::eShadingRateImageReadNV) result += "ShadingRateImageReadNV | ";
-    if (value & AccessFlagBits::eAccelerationStructureReadNV) result += "AccelerationStructureReadNV | ";
-    if (value & AccessFlagBits::eAccelerationStructureWriteNV) result += "AccelerationStructureWriteNV | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
-  {
-    switch (value)
-    {
-    case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
-    case BufferUsageFlagBits::eTransferDst: return "TransferDst";
-    case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
-    case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
-    case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
-    case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
-    case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
-    case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
-    case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
-    case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
-    case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
-    case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
-    case BufferUsageFlagBits::eRayTracingNV: return "RayTracingNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
-    if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | ";
-    if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
-    if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
-    if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | ";
-    if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | ";
-    if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | ";
-    if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | ";
-    if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | ";
-    if (value & BufferUsageFlagBits::eTransformFeedbackBufferEXT) result += "TransformFeedbackBufferEXT | ";
-    if (value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) result += "TransformFeedbackCounterBufferEXT | ";
-    if (value & BufferUsageFlagBits::eConditionalRenderingEXT) result += "ConditionalRenderingEXT | ";
-    if (value & BufferUsageFlagBits::eRayTracingNV) result += "RayTracingNV | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
-    case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
-    case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
-    case BufferCreateFlagBits::eProtected: return "Protected";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
-    if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
-    if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
-    if (value & BufferCreateFlagBits::eProtected) result += "Protected | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
-  {
-    switch (value)
-    {
-    case ShaderStageFlagBits::eVertex: return "Vertex";
-    case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
-    case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
-    case ShaderStageFlagBits::eGeometry: return "Geometry";
-    case ShaderStageFlagBits::eFragment: return "Fragment";
-    case ShaderStageFlagBits::eCompute: return "Compute";
-    case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
-    case ShaderStageFlagBits::eAll: return "All";
-    case ShaderStageFlagBits::eRaygenNV: return "RaygenNV";
-    case ShaderStageFlagBits::eAnyHitNV: return "AnyHitNV";
-    case ShaderStageFlagBits::eClosestHitNV: return "ClosestHitNV";
-    case ShaderStageFlagBits::eMissNV: return "MissNV";
-    case ShaderStageFlagBits::eIntersectionNV: return "IntersectionNV";
-    case ShaderStageFlagBits::eCallableNV: return "CallableNV";
-    case ShaderStageFlagBits::eTaskNV: return "TaskNV";
-    case ShaderStageFlagBits::eMeshNV: return "MeshNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ShaderStageFlagBits::eVertex) result += "Vertex | ";
-    if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | ";
-    if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | ";
-    if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | ";
-    if (value & ShaderStageFlagBits::eFragment) result += "Fragment | ";
-    if (value & ShaderStageFlagBits::eCompute) result += "Compute | ";
-    if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | ";
-    if (value & ShaderStageFlagBits::eAll) result += "All | ";
-    if (value & ShaderStageFlagBits::eRaygenNV) result += "RaygenNV | ";
-    if (value & ShaderStageFlagBits::eAnyHitNV) result += "AnyHitNV | ";
-    if (value & ShaderStageFlagBits::eClosestHitNV) result += "ClosestHitNV | ";
-    if (value & ShaderStageFlagBits::eMissNV) result += "MissNV | ";
-    if (value & ShaderStageFlagBits::eIntersectionNV) result += "IntersectionNV | ";
-    if (value & ShaderStageFlagBits::eCallableNV) result += "CallableNV | ";
-    if (value & ShaderStageFlagBits::eTaskNV) result += "TaskNV | ";
-    if (value & ShaderStageFlagBits::eMeshNV) result += "MeshNV | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
-  {
-    switch (value)
-    {
-    case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
-    case ImageUsageFlagBits::eTransferDst: return "TransferDst";
-    case ImageUsageFlagBits::eSampled: return "Sampled";
-    case ImageUsageFlagBits::eStorage: return "Storage";
-    case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
-    case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
-    case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
-    case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
-    case ImageUsageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
-    if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | ";
-    if (value & ImageUsageFlagBits::eSampled) result += "Sampled | ";
-    if (value & ImageUsageFlagBits::eStorage) result += "Storage | ";
-    if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | ";
-    if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
-    if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | ";
-    if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | ";
-    if (value & ImageUsageFlagBits::eShadingRateImageNV) result += "ShadingRateImageNV | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
-    case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
-    case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
-    case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
-    case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
-    case ImageCreateFlagBits::eAlias: return "Alias";
-    case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
-    case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible";
-    case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible";
-    case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage";
-    case ImageCreateFlagBits::eProtected: return "Protected";
-    case ImageCreateFlagBits::eDisjoint: return "Disjoint";
-    case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV";
-    case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
-    if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
-    if (value & ImageCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
-    if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | ";
-    if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | ";
-    if (value & ImageCreateFlagBits::eAlias) result += "Alias | ";
-    if (value & ImageCreateFlagBits::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | ";
-    if (value & ImageCreateFlagBits::e2DArrayCompatible) result += "2DArrayCompatible | ";
-    if (value & ImageCreateFlagBits::eBlockTexelViewCompatible) result += "BlockTexelViewCompatible | ";
-    if (value & ImageCreateFlagBits::eExtendedUsage) result += "ExtendedUsage | ";
-    if (value & ImageCreateFlagBits::eProtected) result += "Protected | ";
-    if (value & ImageCreateFlagBits::eDisjoint) result += "Disjoint | ";
-    if (value & ImageCreateFlagBits::eCornerSampledNV) result += "CornerSampledNV | ";
-    if (value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) result += "SampleLocationsCompatibleDepthEXT | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
-    case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
-    case PipelineCreateFlagBits::eDerivative: return "Derivative";
-    case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex";
-    case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase";
-    case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & PipelineCreateFlagBits::eDisableOptimization) result += "DisableOptimization | ";
-    if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | ";
-    if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | ";
-    if (value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex) result += "ViewIndexFromDeviceIndex | ";
-    if (value & PipelineCreateFlagBits::eDispatchBase) result += "DispatchBase | ";
-    if (value & PipelineCreateFlagBits::eDeferCompileNV) result += "DeferCompileNV | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
-  {
-    switch (value)
-    {
-    case ColorComponentFlagBits::eR: return "R";
-    case ColorComponentFlagBits::eG: return "G";
-    case ColorComponentFlagBits::eB: return "B";
-    case ColorComponentFlagBits::eA: return "A";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ColorComponentFlagBits::eR) result += "R | ";
-    if (value & ColorComponentFlagBits::eG) result += "G | ";
-    if (value & ColorComponentFlagBits::eB) result += "B | ";
-    if (value & ColorComponentFlagBits::eA) result += "A | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case FenceCreateFlagBits::eSignaled: return "Signaled";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
-  {
-    switch (value)
-    {
-    case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
-    case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
-    case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
-    case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
-    case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
-    case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
-    case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
-    case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
-    case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
-    case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
-    case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
-    case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
-    case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
-    case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc";
-    case FormatFeatureFlagBits::eTransferDst: return "TransferDst";
-    case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples";
-    case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter";
-    case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter";
-    case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit";
-    case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
-    case FormatFeatureFlagBits::eDisjoint: return "Disjoint";
-    case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples";
-    case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
-    case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT: return "SampledImageFilterMinmaxEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | ";
-    if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | ";
-    if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | ";
-    if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
-    if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
-    if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | ";
-    if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | ";
-    if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | ";
-    if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | ";
-    if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
-    if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | ";
-    if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | ";
-    if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | ";
-    if (value & FormatFeatureFlagBits::eTransferSrc) result += "TransferSrc | ";
-    if (value & FormatFeatureFlagBits::eTransferDst) result += "TransferDst | ";
-    if (value & FormatFeatureFlagBits::eMidpointChromaSamples) result += "MidpointChromaSamples | ";
-    if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) result += "SampledImageYcbcrConversionLinearFilter | ";
-    if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
-    if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
-    if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
-    if (value & FormatFeatureFlagBits::eDisjoint) result += "Disjoint | ";
-    if (value & FormatFeatureFlagBits::eCositedChromaSamples) result += "CositedChromaSamples | ";
-    if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | ";
-    if (value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) result += "SampledImageFilterMinmaxEXT | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
-  {
-    switch (value)
-    {
-    case QueryControlFlagBits::ePrecise: return "Precise";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & QueryControlFlagBits::ePrecise) result += "Precise | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
-  {
-    switch (value)
-    {
-    case QueryResultFlagBits::e64: return "64";
-    case QueryResultFlagBits::eWait: return "Wait";
-    case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
-    case QueryResultFlagBits::ePartial: return "Partial";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & QueryResultFlagBits::e64) result += "64 | ";
-    if (value & QueryResultFlagBits::eWait) result += "Wait | ";
-    if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | ";
-    if (value & QueryResultFlagBits::ePartial) result += "Partial | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
-  {
-    switch (value)
-    {
-    case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
-    case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
-    case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | ";
-    if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | ";
-    if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
-  {
-    switch (value)
-    {
-    case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
-    case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
-    case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
-    case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
-    case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
-    case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
-    case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
-    case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
-    case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
-    case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
-    case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | ";
-    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | ";
-    if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | ";
-    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) result += "GeometryShaderInvocations | ";
-    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | ";
-    if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | ";
-    if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | ";
-    if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | ";
-    if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | ";
-    if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | ";
-    if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
-  {
-    switch (value)
-    {
-    case ImageAspectFlagBits::eColor: return "Color";
-    case ImageAspectFlagBits::eDepth: return "Depth";
-    case ImageAspectFlagBits::eStencil: return "Stencil";
-    case ImageAspectFlagBits::eMetadata: return "Metadata";
-    case ImageAspectFlagBits::ePlane0: return "Plane0";
-    case ImageAspectFlagBits::ePlane1: return "Plane1";
-    case ImageAspectFlagBits::ePlane2: return "Plane2";
-    case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT";
-    case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT";
-    case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT";
-    case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ImageAspectFlagBits::eColor) result += "Color | ";
-    if (value & ImageAspectFlagBits::eDepth) result += "Depth | ";
-    if (value & ImageAspectFlagBits::eStencil) result += "Stencil | ";
-    if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | ";
-    if (value & ImageAspectFlagBits::ePlane0) result += "Plane0 | ";
-    if (value & ImageAspectFlagBits::ePlane1) result += "Plane1 | ";
-    if (value & ImageAspectFlagBits::ePlane2) result += "Plane2 | ";
-    if (value & ImageAspectFlagBits::eMemoryPlane0EXT) result += "MemoryPlane0EXT | ";
-    if (value & ImageAspectFlagBits::eMemoryPlane1EXT) result += "MemoryPlane1EXT | ";
-    if (value & ImageAspectFlagBits::eMemoryPlane2EXT) result += "MemoryPlane2EXT | ";
-    if (value & ImageAspectFlagBits::eMemoryPlane3EXT) result += "MemoryPlane3EXT | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
-  {
-    switch (value)
-    {
-    case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
-    case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
-    case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | ";
-    if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | ";
-    if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
-  {
-    switch (value)
-    {
-    case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
-  {
-    switch (value)
-    {
-    case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
-    case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
-    case PipelineStageFlagBits::eVertexInput: return "VertexInput";
-    case PipelineStageFlagBits::eVertexShader: return "VertexShader";
-    case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
-    case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
-    case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
-    case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
-    case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
-    case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
-    case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
-    case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
-    case PipelineStageFlagBits::eTransfer: return "Transfer";
-    case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
-    case PipelineStageFlagBits::eHost: return "Host";
-    case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
-    case PipelineStageFlagBits::eAllCommands: return "AllCommands";
-    case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT";
-    case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
-    case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
-    case PipelineStageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV";
-    case PipelineStageFlagBits::eRayTracingShaderNV: return "RayTracingShaderNV";
-    case PipelineStageFlagBits::eAccelerationStructureBuildNV: return "AccelerationStructureBuildNV";
-    case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV";
-    case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | ";
-    if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | ";
-    if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | ";
-    if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | ";
-    if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | ";
-    if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | ";
-    if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | ";
-    if (value & PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | ";
-    if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | ";
-    if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | ";
-    if (value & PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | ";
-    if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | ";
-    if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | ";
-    if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | ";
-    if (value & PipelineStageFlagBits::eHost) result += "Host | ";
-    if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
-    if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
-    if (value & PipelineStageFlagBits::eTransformFeedbackEXT) result += "TransformFeedbackEXT | ";
-    if (value & PipelineStageFlagBits::eConditionalRenderingEXT) result += "ConditionalRenderingEXT | ";
-    if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
-    if (value & PipelineStageFlagBits::eShadingRateImageNV) result += "ShadingRateImageNV | ";
-    if (value & PipelineStageFlagBits::eRayTracingShaderNV) result += "RayTracingShaderNV | ";
-    if (value & PipelineStageFlagBits::eAccelerationStructureBuildNV) result += "AccelerationStructureBuildNV | ";
-    if (value & PipelineStageFlagBits::eTaskShaderNV) result += "TaskShaderNV | ";
-    if (value & PipelineStageFlagBits::eMeshShaderNV) result += "MeshShaderNV | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case CommandPoolCreateFlagBits::eTransient: return "Transient";
-    case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
-    case CommandPoolCreateFlagBits::eProtected: return "Protected";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | ";
-    if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | ";
-    if (value & CommandPoolCreateFlagBits::eProtected) result += "Protected | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
-  {
-    switch (value)
-    {
-    case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
-  {
-    switch (value)
-    {
-    case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
-  {
-    switch (value)
-    {
-    case SampleCountFlagBits::e1: return "1";
-    case SampleCountFlagBits::e2: return "2";
-    case SampleCountFlagBits::e4: return "4";
-    case SampleCountFlagBits::e8: return "8";
-    case SampleCountFlagBits::e16: return "16";
-    case SampleCountFlagBits::e32: return "32";
-    case SampleCountFlagBits::e64: return "64";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SampleCountFlagBits::e1) result += "1 | ";
-    if (value & SampleCountFlagBits::e2) result += "2 | ";
-    if (value & SampleCountFlagBits::e4) result += "4 | ";
-    if (value & SampleCountFlagBits::e8) result += "8 | ";
-    if (value & SampleCountFlagBits::e16) result += "16 | ";
-    if (value & SampleCountFlagBits::e32) result += "32 | ";
-    if (value & SampleCountFlagBits::e64) result += "64 | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
-  {
-    switch (value)
-    {
-    case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
-  {
-    switch (value)
-    {
-    case StencilFaceFlagBits::eFront: return "Front";
-    case StencilFaceFlagBits::eBack: return "Back";
-    case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & StencilFaceFlagBits::eFront) result += "Front | ";
-    if (value & StencilFaceFlagBits::eBack) result += "Back | ";
-    if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
-    case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT: return "UpdateAfterBindEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | ";
-    if (value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT) result += "UpdateAfterBindEXT | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
-  {
-    switch (value)
-    {
-    case DependencyFlagBits::eByRegion: return "ByRegion";
-    case DependencyFlagBits::eDeviceGroup: return "DeviceGroup";
-    case DependencyFlagBits::eViewLocal: return "ViewLocal";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DependencyFlagBits::eByRegion) result += "ByRegion | ";
-    if (value & DependencyFlagBits::eDeviceGroup) result += "DeviceGroup | ";
-    if (value & DependencyFlagBits::eViewLocal) result += "ViewLocal | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
-  {
-    switch (value)
-    {
-    case PresentModeKHR::eImmediate: return "Immediate";
-    case PresentModeKHR::eMailbox: return "Mailbox";
-    case PresentModeKHR::eFifo: return "Fifo";
-    case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
-    case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
-    case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
-  {
-    switch (value)
-    {
-    case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
-    case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
-    case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
-    case ColorSpaceKHR::eDciP3LinearEXT: return "DciP3LinearEXT";
-    case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
-    case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
-    case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
-    case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
-    case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
-    case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT";
-    case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT";
-    case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
-    case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
-    case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT";
-    case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
-  {
-    switch (value)
-    {
-    case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
-    case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
-    case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
-    case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
-    if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | ";
-    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | ";
-    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
-  {
-    switch (value)
-    {
-    case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
-    case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
-    case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
-    case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
-    if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | ";
-    if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | ";
-    if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
-  {
-    switch (value)
-    {
-    case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
-    case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
-    case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
-    case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
-    case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
-    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
-    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
-    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
-    case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | ";
-    if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | ";
-    if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | ";
-    if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | ";
-    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | ";
-    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | ";
-    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | ";
-    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | ";
-    if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(TimeDomainEXT value)
-  {
-    switch (value)
-    {
-    case TimeDomainEXT::eDevice: return "Device";
-    case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic";
-    case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw";
-    case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
-  {
-    switch (value)
-    {
-    case DebugReportFlagBitsEXT::eInformation: return "Information";
-    case DebugReportFlagBitsEXT::eWarning: return "Warning";
-    case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
-    case DebugReportFlagBitsEXT::eError: return "Error";
-    case DebugReportFlagBitsEXT::eDebug: return "Debug";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | ";
-    if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | ";
-    if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | ";
-    if (value & DebugReportFlagBitsEXT::eError) result += "Error | ";
-    if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
-  {
-    switch (value)
-    {
-    case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
-    case DebugReportObjectTypeEXT::eInstance: return "Instance";
-    case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
-    case DebugReportObjectTypeEXT::eDevice: return "Device";
-    case DebugReportObjectTypeEXT::eQueue: return "Queue";
-    case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
-    case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
-    case DebugReportObjectTypeEXT::eFence: return "Fence";
-    case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
-    case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
-    case DebugReportObjectTypeEXT::eImage: return "Image";
-    case DebugReportObjectTypeEXT::eEvent: return "Event";
-    case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
-    case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
-    case DebugReportObjectTypeEXT::eImageView: return "ImageView";
-    case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
-    case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
-    case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
-    case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
-    case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
-    case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
-    case DebugReportObjectTypeEXT::eSampler: return "Sampler";
-    case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
-    case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
-    case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
-    case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
-    case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
-    case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
-    case DebugReportObjectTypeEXT::eDebugReportCallbackExt: return "DebugReportCallbackExt";
-    case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
-    case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
-    case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
-    case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
-    case DebugReportObjectTypeEXT::eValidationCacheExt: return "ValidationCacheExt";
-    case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
-    case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
-    case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
-  {
-    switch (value)
-    {
-    case RasterizationOrderAMD::eStrict: return "Strict";
-    case RasterizationOrderAMD::eRelaxed: return "Relaxed";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
-  {
-    switch (value)
-    {
-    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
-    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
-    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
-    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | ";
-    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
-    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | ";
-    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
-  {
-    switch (value)
-    {
-    case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
-    case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
-    case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | ";
-    if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | ";
-    if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
-  {
-    switch (value)
-    {
-    case ValidationCheckEXT::eAll: return "All";
-    case ValidationCheckEXT::eShaders: return "Shaders";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlagBits value)
-  {
-    switch (value)
-    {
-    case SubgroupFeatureFlagBits::eBasic: return "Basic";
-    case SubgroupFeatureFlagBits::eVote: return "Vote";
-    case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic";
-    case SubgroupFeatureFlagBits::eBallot: return "Ballot";
-    case SubgroupFeatureFlagBits::eShuffle: return "Shuffle";
-    case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative";
-    case SubgroupFeatureFlagBits::eClustered: return "Clustered";
-    case SubgroupFeatureFlagBits::eQuad: return "Quad";
-    case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SubgroupFeatureFlagBits::eBasic) result += "Basic | ";
-    if (value & SubgroupFeatureFlagBits::eVote) result += "Vote | ";
-    if (value & SubgroupFeatureFlagBits::eArithmetic) result += "Arithmetic | ";
-    if (value & SubgroupFeatureFlagBits::eBallot) result += "Ballot | ";
-    if (value & SubgroupFeatureFlagBits::eShuffle) result += "Shuffle | ";
-    if (value & SubgroupFeatureFlagBits::eShuffleRelative) result += "ShuffleRelative | ";
-    if (value & SubgroupFeatureFlagBits::eClustered) result += "Clustered | ";
-    if (value & SubgroupFeatureFlagBits::eQuad) result += "Quad | ";
-    if (value & SubgroupFeatureFlagBits::ePartitionedNV) result += "PartitionedNV | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
-  {
-    switch (value)
-    {
-    case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
-    case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
-    case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
-    case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
-    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
-    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
-    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
-  {
-    switch (value)
-    {
-    case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
-    case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
-    if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
-  {
-    switch (value)
-    {
-    case IndirectCommandsTokenTypeNVX::ePipeline: return "Pipeline";
-    case IndirectCommandsTokenTypeNVX::eDescriptorSet: return "DescriptorSet";
-    case IndirectCommandsTokenTypeNVX::eIndexBuffer: return "IndexBuffer";
-    case IndirectCommandsTokenTypeNVX::eVertexBuffer: return "VertexBuffer";
-    case IndirectCommandsTokenTypeNVX::ePushConstant: return "PushConstant";
-    case IndirectCommandsTokenTypeNVX::eDrawIndexed: return "DrawIndexed";
-    case IndirectCommandsTokenTypeNVX::eDraw: return "Draw";
-    case IndirectCommandsTokenTypeNVX::eDispatch: return "Dispatch";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
-  {
-    switch (value)
-    {
-    case ObjectEntryTypeNVX::eDescriptorSet: return "DescriptorSet";
-    case ObjectEntryTypeNVX::ePipeline: return "Pipeline";
-    case ObjectEntryTypeNVX::eIndexBuffer: return "IndexBuffer";
-    case ObjectEntryTypeNVX::eVertexBuffer: return "VertexBuffer";
-    case ObjectEntryTypeNVX::ePushConstant: return "PushConstant";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits value)
-  {
-    switch (value)
-    {
-    case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
-    case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT: return "UpdateAfterBindPoolEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) result += "PushDescriptorKHR | ";
-    if (value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT) result += "UpdateAfterBindPoolEXT | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBits value)
-  {
-    switch (value)
-    {
-    case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
-    case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
-    case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
-    case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture";
-    case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt";
-    case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap";
-    case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource";
-    case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT";
-    case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID";
-    case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT";
-    case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture) result += "D3D11Texture | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) result += "D3D11TextureKmt | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap) result += "D3D12Heap | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource) result += "D3D12Resource | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) result += "DmaBufEXT | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) result += "AndroidHardwareBufferANDROID | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) result += "HostAllocationEXT | ";
-    if (value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) result += "HostMappedForeignMemoryEXT | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBits value)
-  {
-    switch (value)
-    {
-    case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly";
-    case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable";
-    case ExternalMemoryFeatureFlagBits::eImportable: return "Importable";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalMemoryFeatureFlagBits::eDedicatedOnly) result += "DedicatedOnly | ";
-    if (value & ExternalMemoryFeatureFlagBits::eExportable) result += "Exportable | ";
-    if (value & ExternalMemoryFeatureFlagBits::eImportable) result += "Importable | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlagBits value)
-  {
-    switch (value)
-    {
-    case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
-    case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
-    case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
-    case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence";
-    case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | ";
-    if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | ";
-    if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
-    if (value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) result += "D3D12Fence | ";
-    if (value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd) result += "SyncFd | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlagBits value)
-  {
-    switch (value)
-    {
-    case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable";
-    case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalSemaphoreFeatureFlagBits::eExportable) result += "Exportable | ";
-    if (value & ExternalSemaphoreFeatureFlagBits::eImportable) result += "Importable | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlagBits value)
-  {
-    switch (value)
-    {
-    case SemaphoreImportFlagBits::eTemporary: return "Temporary";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SemaphoreImportFlagBits::eTemporary) result += "Temporary | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlagBits value)
-  {
-    switch (value)
-    {
-    case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
-    case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
-    case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
-    case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalFenceHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | ";
-    if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | ";
-    if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
-    if (value & ExternalFenceHandleTypeFlagBits::eSyncFd) result += "SyncFd | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlagBits value)
-  {
-    switch (value)
-    {
-    case ExternalFenceFeatureFlagBits::eExportable: return "Exportable";
-    case ExternalFenceFeatureFlagBits::eImportable: return "Importable";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ExternalFenceFeatureFlagBits::eExportable) result += "Exportable | ";
-    if (value & ExternalFenceFeatureFlagBits::eImportable) result += "Importable | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FenceImportFlagBits value)
-  {
-    switch (value)
-    {
-    case FenceImportFlagBits::eTemporary: return "Temporary";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(FenceImportFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & FenceImportFlagBits::eTemporary) result += "Temporary | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagBitsEXT value)
-  {
-    switch (value)
-    {
-    case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagsEXT value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SurfaceCounterFlagBitsEXT::eVblank) result += "Vblank | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplayPowerStateEXT value)
-  {
-    switch (value)
-    {
-    case DisplayPowerStateEXT::eOff: return "Off";
-    case DisplayPowerStateEXT::eSuspend: return "Suspend";
-    case DisplayPowerStateEXT::eOn: return "On";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DeviceEventTypeEXT value)
-  {
-    switch (value)
-    {
-    case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DisplayEventTypeEXT value)
-  {
-    switch (value)
-    {
-    case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlagBits value)
-  {
-    switch (value)
-    {
-    case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc";
-    case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst";
-    case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc";
-    case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & PeerMemoryFeatureFlagBits::eCopySrc) result += "CopySrc | ";
-    if (value & PeerMemoryFeatureFlagBits::eCopyDst) result += "CopyDst | ";
-    if (value & PeerMemoryFeatureFlagBits::eGenericSrc) result += "GenericSrc | ";
-    if (value & PeerMemoryFeatureFlagBits::eGenericDst) result += "GenericDst | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlagBits value)
-  {
-    switch (value)
-    {
-    case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & MemoryAllocateFlagBits::eDeviceMask) result += "DeviceMask | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagBitsKHR value)
-  {
-    switch (value)
-    {
-    case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
-    case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
-    case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
-    case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagsKHR value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DeviceGroupPresentModeFlagBitsKHR::eLocal) result += "Local | ";
-    if (value & DeviceGroupPresentModeFlagBitsKHR::eRemote) result += "Remote | ";
-    if (value & DeviceGroupPresentModeFlagBitsKHR::eSum) result += "Sum | ";
-    if (value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) result += "LocalMultiDevice | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR value)
-  {
-    switch (value)
-    {
-    case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
-    case SwapchainCreateFlagBitsKHR::eProtected: return "Protected";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | ";
-    if (value & SwapchainCreateFlagBitsKHR::eProtected) result += "Protected | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ViewportCoordinateSwizzleNV value)
-  {
-    switch (value)
-    {
-    case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
-    case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
-    case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
-    case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
-    case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
-    case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
-    case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
-    case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DiscardRectangleModeEXT value)
-  {
-    switch (value)
-    {
-    case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
-    case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits value)
-  {
-    switch (value)
-    {
-    case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX";
-    case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & SubpassDescriptionFlagBits::ePerViewAttributesNVX) result += "PerViewAttributesNVX | ";
-    if (value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) result += "PerViewPositionXOnlyNVX | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(PointClippingBehavior value)
-  {
-    switch (value)
-    {
-    case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes";
-    case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SamplerReductionModeEXT value)
-  {
-    switch (value)
-    {
-    case SamplerReductionModeEXT::eWeightedAverage: return "WeightedAverage";
-    case SamplerReductionModeEXT::eMin: return "Min";
-    case SamplerReductionModeEXT::eMax: return "Max";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(TessellationDomainOrigin value)
-  {
-    switch (value)
-    {
-    case TessellationDomainOrigin::eUpperLeft: return "UpperLeft";
-    case TessellationDomainOrigin::eLowerLeft: return "LowerLeft";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrModelConversion value)
-  {
-    switch (value)
-    {
-    case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity";
-    case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity";
-    case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709";
-    case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601";
-    case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrRange value)
-  {
-    switch (value)
-    {
-    case SamplerYcbcrRange::eItuFull: return "ItuFull";
-    case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ChromaLocation value)
-  {
-    switch (value)
-    {
-    case ChromaLocation::eCositedEven: return "CositedEven";
-    case ChromaLocation::eMidpoint: return "Midpoint";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BlendOverlapEXT value)
-  {
-    switch (value)
-    {
-    case BlendOverlapEXT::eUncorrelated: return "Uncorrelated";
-    case BlendOverlapEXT::eDisjoint: return "Disjoint";
-    case BlendOverlapEXT::eConjoint: return "Conjoint";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CoverageModulationModeNV value)
-  {
-    switch (value)
-    {
-    case CoverageModulationModeNV::eNone: return "None";
-    case CoverageModulationModeNV::eRgb: return "Rgb";
-    case CoverageModulationModeNV::eAlpha: return "Alpha";
-    case CoverageModulationModeNV::eRgba: return "Rgba";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ValidationCacheHeaderVersionEXT value)
-  {
-    switch (value)
-    {
-    case ValidationCacheHeaderVersionEXT::eOne: return "One";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ShaderInfoTypeAMD value)
-  {
-    switch (value)
-    {
-    case ShaderInfoTypeAMD::eStatistics: return "Statistics";
-    case ShaderInfoTypeAMD::eBinary: return "Binary";
-    case ShaderInfoTypeAMD::eDisassembly: return "Disassembly";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(QueueGlobalPriorityEXT value)
-  {
-    switch (value)
-    {
-    case QueueGlobalPriorityEXT::eLow: return "Low";
-    case QueueGlobalPriorityEXT::eMedium: return "Medium";
-    case QueueGlobalPriorityEXT::eHigh: return "High";
-    case QueueGlobalPriorityEXT::eRealtime: return "Realtime";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagBitsEXT value)
-  {
-    switch (value)
-    {
-    case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose";
-    case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info";
-    case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning";
-    case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagsEXT value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) result += "Verbose | ";
-    if (value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo) result += "Info | ";
-    if (value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning) result += "Warning | ";
-    if (value & DebugUtilsMessageSeverityFlagBitsEXT::eError) result += "Error | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagBitsEXT value)
-  {
-    switch (value)
-    {
-    case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
-    case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
-    case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagsEXT value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral) result += "General | ";
-    if (value & DebugUtilsMessageTypeFlagBitsEXT::eValidation) result += "Validation | ";
-    if (value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance) result += "Performance | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ConservativeRasterizationModeEXT value)
-  {
-    switch (value)
-    {
-    case ConservativeRasterizationModeEXT::eDisabled: return "Disabled";
-    case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate";
-    case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagBitsEXT value)
-  {
-    switch (value)
-    {
-    case DescriptorBindingFlagBitsEXT::eUpdateAfterBind: return "UpdateAfterBind";
-    case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
-    case DescriptorBindingFlagBitsEXT::ePartiallyBound: return "PartiallyBound";
-    case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount: return "VariableDescriptorCount";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagsEXT value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind) result += "UpdateAfterBind | ";
-    if (value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) result += "UpdateUnusedWhilePending | ";
-    if (value & DescriptorBindingFlagBitsEXT::ePartiallyBound) result += "PartiallyBound | ";
-    if (value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount) result += "VariableDescriptorCount | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(VendorId value)
-  {
-    switch (value)
-    {
-    case VendorId::eViv: return "Viv";
-    case VendorId::eVsi: return "Vsi";
-    case VendorId::eKazan: return "Kazan";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(DriverIdKHR value)
-  {
-    switch (value)
-    {
-    case DriverIdKHR::eAmdProprietary: return "AmdProprietary";
-    case DriverIdKHR::eAmdOpenSource: return "AmdOpenSource";
-    case DriverIdKHR::eMesaRadv: return "MesaRadv";
-    case DriverIdKHR::eNvidiaProprietary: return "NvidiaProprietary";
-    case DriverIdKHR::eIntelProprietaryWindows: return "IntelProprietaryWindows";
-    case DriverIdKHR::eIntelOpenSourceMesa: return "IntelOpenSourceMesa";
-    case DriverIdKHR::eImaginationProprietary: return "ImaginationProprietary";
-    case DriverIdKHR::eQualcommProprietary: return "QualcommProprietary";
-    case DriverIdKHR::eArmProprietary: return "ArmProprietary";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ConditionalRenderingFlagBitsEXT value)
-  {
-    switch (value)
-    {
-    case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ConditionalRenderingFlagsEXT value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & ConditionalRenderingFlagBitsEXT::eInverted) result += "Inverted | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(ShadingRatePaletteEntryNV value)
-  {
-    switch (value)
-    {
-    case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations";
-    case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel";
-    case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel";
-    case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel";
-    case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel";
-    case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel";
-    case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels";
-    case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels";
-    case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels";
-    case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels";
-    case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels";
-    case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CoarseSampleOrderTypeNV value)
-  {
-    switch (value)
-    {
-    case CoarseSampleOrderTypeNV::eDefault: return "Default";
-    case CoarseSampleOrderTypeNV::eCustom: return "Custom";
-    case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor";
-    case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(GeometryInstanceFlagBitsNV value)
-  {
-    switch (value)
-    {
-    case GeometryInstanceFlagBitsNV::eTriangleCullDisable: return "TriangleCullDisable";
-    case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise: return "TriangleFrontCounterclockwise";
-    case GeometryInstanceFlagBitsNV::eForceOpaque: return "ForceOpaque";
-    case GeometryInstanceFlagBitsNV::eForceNoOpaque: return "ForceNoOpaque";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(GeometryInstanceFlagsNV value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & GeometryInstanceFlagBitsNV::eTriangleCullDisable) result += "TriangleCullDisable | ";
-    if (value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) result += "TriangleFrontCounterclockwise | ";
-    if (value & GeometryInstanceFlagBitsNV::eForceOpaque) result += "ForceOpaque | ";
-    if (value & GeometryInstanceFlagBitsNV::eForceNoOpaque) result += "ForceNoOpaque | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(GeometryFlagBitsNV value)
-  {
-    switch (value)
-    {
-    case GeometryFlagBitsNV::eOpaque: return "Opaque";
-    case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(GeometryFlagsNV value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & GeometryFlagBitsNV::eOpaque) result += "Opaque | ";
-    if (value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation) result += "NoDuplicateAnyHitInvocation | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BuildAccelerationStructureFlagBitsNV value)
-  {
-    switch (value)
-    {
-    case BuildAccelerationStructureFlagBitsNV::eAllowUpdate: return "AllowUpdate";
-    case BuildAccelerationStructureFlagBitsNV::eAllowCompaction: return "AllowCompaction";
-    case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace: return "PreferFastTrace";
-    case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild: return "PreferFastBuild";
-    case BuildAccelerationStructureFlagBitsNV::eLowMemory: return "LowMemory";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(BuildAccelerationStructureFlagsNV value)
-  {
-    if (!value) return "{}";
-    std::string result;
-    if (value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate) result += "AllowUpdate | ";
-    if (value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction) result += "AllowCompaction | ";
-    if (value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) result += "PreferFastTrace | ";
-    if (value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) result += "PreferFastBuild | ";
-    if (value & BuildAccelerationStructureFlagBitsNV::eLowMemory) result += "LowMemory | ";
-    return "{" + result.substr(0, result.size() - 3) + "}";
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(CopyAccelerationStructureModeNV value)
-  {
-    switch (value)
-    {
-    case CopyAccelerationStructureModeNV::eClone: return "Clone";
-    case CopyAccelerationStructureModeNV::eCompact: return "Compact";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AccelerationStructureTypeNV value)
-  {
-    switch (value)
-    {
-    case AccelerationStructureTypeNV::eTopLevel: return "TopLevel";
-    case AccelerationStructureTypeNV::eBottomLevel: return "BottomLevel";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(GeometryTypeNV value)
-  {
-    switch (value)
-    {
-    case GeometryTypeNV::eTriangles: return "Triangles";
-    case GeometryTypeNV::eAabbs: return "Aabbs";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(AccelerationStructureMemoryRequirementsTypeNV value)
-  {
-    switch (value)
-    {
-    case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object";
-    case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch";
-    case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch";
-    default: return "invalid";
-    }
-  }
-
-  VULKAN_HPP_INLINE std::string to_string(RayTracingShaderGroupTypeNV value)
-  {
-    switch (value)
-    {
-    case RayTracingShaderGroupTypeNV::eGeneral: return "General";
-    case RayTracingShaderGroupTypeNV::eTrianglesHitGroup: return "TrianglesHitGroup";
-    case RayTracingShaderGroupTypeNV::eProceduralHitGroup: return "ProceduralHitGroup";
-    default: return "invalid";
-    }
-  }
+#endif
 
-  VULKAN_HPP_INLINE std::string to_string(MemoryOverallocationBehaviorAMD value)
-  {
-    switch (value)
-    {
-    case MemoryOverallocationBehaviorAMD::eDefault: return "Default";
-    case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed";
-    case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed";
-    default: return "invalid";
-    }
-  }
 
   class DispatchLoaderDynamic
   {
   public:
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
     PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
     PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
+    PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
     PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
     PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
     PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
     PFN_vkAllocateMemory vkAllocateMemory = 0;
     PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
     PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkBindAccelerationStructureMemoryKHR vkBindAccelerationStructureMemoryKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkBindBufferMemory vkBindBufferMemory = 0;
-    PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
     PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+    PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
     PFN_vkBindImageMemory vkBindImageMemory = 0;
-    PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
     PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+    PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkBuildAccelerationStructureKHR vkBuildAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
     PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
     PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
     PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
     PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
     PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
+    PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
     PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
     PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
     PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
     PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+    PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
     PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
     PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
     PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
     PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+    PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdBuildAccelerationStructureIndirectKHR vkCmdBuildAccelerationStructureIndirectKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdBuildAccelerationStructureKHR vkCmdBuildAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
     PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
     PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
     PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+    PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
     PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+    PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
     PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+    PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
     PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+    PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
     PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
     PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
     PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
     PFN_vkCmdDispatch vkCmdDispatch = 0;
-    PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
     PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+    PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
     PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
     PFN_vkCmdDraw vkCmdDraw = 0;
     PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
     PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
     PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
     PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
+    PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
     PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
     PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
     PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
     PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+    PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
     PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
     PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
     PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
@@ -52325,47 +91686,87 @@
     PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
     PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
     PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
+    PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
     PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
     PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
+    PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
     PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
     PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
     PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
     PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
+    PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
     PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
-    PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0;
+    PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
     PFN_vkCmdPushConstants vkCmdPushConstants = 0;
     PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
     PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
-    PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0;
     PFN_vkCmdResetEvent vkCmdResetEvent = 0;
     PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
     PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+    PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
     PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
     PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
     PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+    PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
     PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
     PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
-    PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+    PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+    PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+    PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+    PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
     PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
+    PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
     PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
     PFN_vkCmdSetEvent vkCmdSetEvent = 0;
     PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
+    PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
+    PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
+    PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
     PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
+    PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
+    PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+    PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
     PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
     PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+    PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
     PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+    PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
     PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+    PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
     PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
     PFN_vkCmdSetViewport vkCmdSetViewport = 0;
     PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
     PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
+    PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
     PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
     PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
     PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
     PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
     PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
     PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
@@ -52376,43 +91777,60 @@
     PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
     PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
     PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
     PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
-    PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
     PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
+    PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
     PFN_vkCreateDevice vkCreateDevice = 0;
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+    PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
     PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
     PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
     PFN_vkCreateEvent vkCreateEvent = 0;
     PFN_vkCreateFence vkCreateFence = 0;
     PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
     PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+    PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
 #ifdef VK_USE_PLATFORM_IOS_MVK
     PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
 #endif /*VK_USE_PLATFORM_IOS_MVK*/
     PFN_vkCreateImage vkCreateImage = 0;
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
+#ifdef VK_USE_PLATFORM_FUCHSIA
     PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
     PFN_vkCreateImageView vkCreateImageView = 0;
-    PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0;
+    PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
     PFN_vkCreateInstance vkCreateInstance = 0;
 #ifdef VK_USE_PLATFORM_MACOS_MVK
     PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
 #endif /*VK_USE_PLATFORM_MACOS_MVK*/
-    PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0;
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
     PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
     PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+    PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
     PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
     PFN_vkCreateRenderPass vkCreateRenderPass = 0;
     PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
+    PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
     PFN_vkCreateSampler vkCreateSampler = 0;
-    PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
     PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+    PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
     PFN_vkCreateSemaphore vkCreateSemaphore = 0;
     PFN_vkCreateShaderModule vkCreateShaderModule = 0;
     PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
+#ifdef VK_USE_PLATFORM_GGP
+    PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+#endif /*VK_USE_PLATFORM_GGP*/
     PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
     PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
 #ifdef VK_USE_PLATFORM_VI_NN
@@ -52433,33 +91851,42 @@
     PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
     PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
     PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkDestroyBuffer vkDestroyBuffer = 0;
     PFN_vkDestroyBufferView vkDestroyBufferView = 0;
     PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
     PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
     PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
     PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
-    PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
     PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
+    PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
     PFN_vkDestroyDevice vkDestroyDevice = 0;
     PFN_vkDestroyEvent vkDestroyEvent = 0;
     PFN_vkDestroyFence vkDestroyFence = 0;
     PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
     PFN_vkDestroyImage vkDestroyImage = 0;
     PFN_vkDestroyImageView vkDestroyImageView = 0;
-    PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0;
+    PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
     PFN_vkDestroyInstance vkDestroyInstance = 0;
-    PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0;
     PFN_vkDestroyPipeline vkDestroyPipeline = 0;
     PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
     PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+    PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
     PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
     PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
     PFN_vkDestroySampler vkDestroySampler = 0;
-    PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
     PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+    PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
     PFN_vkDestroySemaphore vkDestroySemaphore = 0;
     PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
     PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
@@ -52473,29 +91900,55 @@
     PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
     PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
     PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
-    PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
     PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+    PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
     PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
     PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
     PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
     PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
     PFN_vkFreeMemory vkFreeMemory = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkGetAccelerationStructureMemoryRequirementsKHR vkGetAccelerationStructureMemoryRequirementsKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
     PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+    PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+    PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
     PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
-    PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
     PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+    PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+    PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+    PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
     PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
-    PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
-    PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+    PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+    PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
     PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
     PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
     PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
     PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
     PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
     PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
@@ -52510,74 +91963,88 @@
 #ifdef VK_USE_PLATFORM_WIN32_KHR
     PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
     PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
     PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
-    PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
     PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+    PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
     PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
-    PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
     PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+    PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
     PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+    PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
+    PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
     PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
     PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
     PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
     PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
     PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
 #ifdef VK_USE_PLATFORM_WIN32_KHR
     PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_NV
+#ifdef VK_USE_PLATFORM_WIN32_KHR
     PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
 #ifdef VK_USE_PLATFORM_WIN32_KHR
     PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
     PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
+    PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
     PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+    PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
     PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
     PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
     PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
     PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
-    PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
     PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
-    PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
     PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
     PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
-    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
     PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
     PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
-    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
     PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
     PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
-    PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
     PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
-    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
     PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
-    PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
     PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
     PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
-    PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
     PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
     PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
     PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
     PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
-    PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
     PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
     PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
-    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
     PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
     PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
-    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
     PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
     PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
     PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
     PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
     PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
     PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
     PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
     PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+    PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
     PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
@@ -52591,14 +92058,26 @@
     PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
     PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+    PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+    PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+    PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+    PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
     PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
     PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
     PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
     PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+    PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+    PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
     PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
 #ifdef VK_USE_PLATFORM_WIN32_KHR
     PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
@@ -52616,6 +92095,7 @@
 #ifdef VK_USE_PLATFORM_WIN32_KHR
     PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
     PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
     PFN_vkMapMemory vkMapMemory = 0;
     PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
@@ -52625,432 +92105,1495 @@
     PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
     PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
     PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+    PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
     PFN_vkQueueSubmit vkQueueSubmit = 0;
     PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
     PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
     PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
-    PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0;
     PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
     PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
     PFN_vkResetCommandPool vkResetCommandPool = 0;
     PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
     PFN_vkResetEvent vkResetEvent = 0;
     PFN_vkResetFences vkResetFences = 0;
+    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+    PFN_vkResetQueryPool vkResetQueryPool = 0;
     PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
     PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
     PFN_vkSetEvent vkSetEvent = 0;
     PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+    PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+    PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
+    PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
+    PFN_vkSignalSemaphore vkSignalSemaphore = 0;
     PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
-    PFN_vkTrimCommandPool vkTrimCommandPool = 0;
     PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+    PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+    PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
     PFN_vkUnmapMemory vkUnmapMemory = 0;
-    PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0;
-    PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
     PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+    PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
     PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
     PFN_vkWaitForFences vkWaitForFences = 0;
+    PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+    PFN_vkWaitSemaphores vkWaitSemaphores = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
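// Note on the function-pointer table above: every PFN_ member is value-initialized
// to 0 and only filled in by the init() overloads that follow; entries for
// platform-specific or provisional commands are compiled in only when the
// corresponding VK_USE_PLATFORM_* or VK_ENABLE_BETA_EXTENSIONS macro is defined.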
   public:
-    DispatchLoaderDynamic(Instance instance = Instance(), Device device = Device())
+    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+
+#if !defined(VK_NO_PROTOTYPES)
+    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    template <typename DynamicLoader>
+    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device, DynamicLoader const& dl) VULKAN_HPP_NOEXCEPT
     {
-      if (instance)
-      {
-        init(instance, device);
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+    }
+
+    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    template <typename DynamicLoader
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      = VULKAN_HPP_NAMESPACE::DynamicLoader
+#endif
+    >
+    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
+    {
+      static DynamicLoader dl;
+      init(instance, device, dl);
+    }
+#endif // !defined(VK_NO_PROTOTYPES)
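// A minimal usage sketch for the two loader-based overloads above (editorial,
// not part of the generated header). The instance and device are assumed to
// have been created elsewhere, and "vk" stands for VULKAN_HPP_NAMESPACE:
//
//   vk::DynamicLoader dl;                     // opens the system vulkan library
//   vk::DispatchLoaderDynamic dispatcher;
//   dispatcher.init( instance, device, dl );  // resolves instance- and device-level entry points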
+
+    DispatchLoaderDynamic(PFN_vkGetInstanceProcAddr getInstanceProcAddr) VULKAN_HPP_NOEXCEPT
+    {
+      init(getInstanceProcAddr);
+    }
+
+    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(getInstanceProcAddr);
+
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+    }
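// Bootstrap sketch, assuming the application obtained vkGetInstanceProcAddr on
// its own (for example from vk::DynamicLoader) and that createInfo is a
// placeholder vk::InstanceCreateInfo. Only the four global commands resolved
// above are available until an instance-level init() follows:
//
//   vk::DispatchLoaderDynamic dispatcher( getInstanceProcAddr );
//   vk::Instance instance = vk::createInstance( createInfo, nullptr, dispatcher );
//   dispatcher.init( instance );              // now resolve the instance-level entry points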
+
+    // This interface does not require a linked vulkan library.
+    DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+    }
+
+    // This interface does not require a linked vulkan library.
+    void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance(instance) );
+      if (device) {
+        init( VULKAN_HPP_NAMESPACE::Device(device) );
       }
     }
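// Sketch of the "no linked vulkan library" path used by the constructor and
// init() above, assuming the loader was opened manually on a POSIX system; the
// library name and the dlopen/dlsym handles are illustrative, not taken from
// this header:
//
//   void * lib  = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
//   auto   gipa = reinterpret_cast<PFN_vkGetInstanceProcAddr>( dlsym( lib, "vkGetInstanceProcAddr" ) );
//   vk::DispatchLoaderDynamic dispatcher( vkInstance, gipa );   // instance-level functions
//   dispatcher.init( vkInstance, gipa, vkDevice );              // optionally device-level as well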
 
-    void init(Instance instance, Device device = Device())
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
     {
-      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR(device ? device.getProcAddr( "vkAcquireNextImage2KHR") : instance.getProcAddr( "vkAcquireNextImage2KHR"));
-      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR(device ? device.getProcAddr( "vkAcquireNextImageKHR") : instance.getProcAddr( "vkAcquireNextImageKHR"));
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
-      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT(instance.getProcAddr( "vkAcquireXlibDisplayEXT"));
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
-      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers(device ? device.getProcAddr( "vkAllocateCommandBuffers") : instance.getProcAddr( "vkAllocateCommandBuffers"));
-      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets(device ? device.getProcAddr( "vkAllocateDescriptorSets") : instance.getProcAddr( "vkAllocateDescriptorSets"));
-      vkAllocateMemory = PFN_vkAllocateMemory(device ? device.getProcAddr( "vkAllocateMemory") : instance.getProcAddr( "vkAllocateMemory"));
-      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer(device ? device.getProcAddr( "vkBeginCommandBuffer") : instance.getProcAddr( "vkBeginCommandBuffer"));
-      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV(device ? device.getProcAddr( "vkBindAccelerationStructureMemoryNV") : instance.getProcAddr( "vkBindAccelerationStructureMemoryNV"));
-      vkBindBufferMemory = PFN_vkBindBufferMemory(device ? device.getProcAddr( "vkBindBufferMemory") : instance.getProcAddr( "vkBindBufferMemory"));
-      vkBindBufferMemory2 = PFN_vkBindBufferMemory2(device ? device.getProcAddr( "vkBindBufferMemory2") : instance.getProcAddr( "vkBindBufferMemory2"));
-      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR(device ? device.getProcAddr( "vkBindBufferMemory2KHR") : instance.getProcAddr( "vkBindBufferMemory2KHR"));
-      vkBindImageMemory = PFN_vkBindImageMemory(device ? device.getProcAddr( "vkBindImageMemory") : instance.getProcAddr( "vkBindImageMemory"));
-      vkBindImageMemory2 = PFN_vkBindImageMemory2(device ? device.getProcAddr( "vkBindImageMemory2") : instance.getProcAddr( "vkBindImageMemory2"));
-      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR(device ? device.getProcAddr( "vkBindImageMemory2KHR") : instance.getProcAddr( "vkBindImageMemory2KHR"));
-      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT(device ? device.getProcAddr( "vkCmdBeginConditionalRenderingEXT") : instance.getProcAddr( "vkCmdBeginConditionalRenderingEXT"));
-      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT"));
-      vkCmdBeginQuery = PFN_vkCmdBeginQuery(device ? device.getProcAddr( "vkCmdBeginQuery") : instance.getProcAddr( "vkCmdBeginQuery"));
-      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT(device ? device.getProcAddr( "vkCmdBeginQueryIndexedEXT") : instance.getProcAddr( "vkCmdBeginQueryIndexedEXT"));
-      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass(device ? device.getProcAddr( "vkCmdBeginRenderPass") : instance.getProcAddr( "vkCmdBeginRenderPass"));
-      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR(device ? device.getProcAddr( "vkCmdBeginRenderPass2KHR") : instance.getProcAddr( "vkCmdBeginRenderPass2KHR"));
-      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT(device ? device.getProcAddr( "vkCmdBeginTransformFeedbackEXT") : instance.getProcAddr( "vkCmdBeginTransformFeedbackEXT"));
-      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets(device ? device.getProcAddr( "vkCmdBindDescriptorSets") : instance.getProcAddr( "vkCmdBindDescriptorSets"));
-      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer(device ? device.getProcAddr( "vkCmdBindIndexBuffer") : instance.getProcAddr( "vkCmdBindIndexBuffer"));
-      vkCmdBindPipeline = PFN_vkCmdBindPipeline(device ? device.getProcAddr( "vkCmdBindPipeline") : instance.getProcAddr( "vkCmdBindPipeline"));
-      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV(device ? device.getProcAddr( "vkCmdBindShadingRateImageNV") : instance.getProcAddr( "vkCmdBindShadingRateImageNV"));
-      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(device ? device.getProcAddr( "vkCmdBindTransformFeedbackBuffersEXT") : instance.getProcAddr( "vkCmdBindTransformFeedbackBuffersEXT"));
-      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers(device ? device.getProcAddr( "vkCmdBindVertexBuffers") : instance.getProcAddr( "vkCmdBindVertexBuffers"));
-      vkCmdBlitImage = PFN_vkCmdBlitImage(device ? device.getProcAddr( "vkCmdBlitImage") : instance.getProcAddr( "vkCmdBlitImage"));
-      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV(device ? device.getProcAddr( "vkCmdBuildAccelerationStructureNV") : instance.getProcAddr( "vkCmdBuildAccelerationStructureNV"));
-      vkCmdClearAttachments = PFN_vkCmdClearAttachments(device ? device.getProcAddr( "vkCmdClearAttachments") : instance.getProcAddr( "vkCmdClearAttachments"));
-      vkCmdClearColorImage = PFN_vkCmdClearColorImage(device ? device.getProcAddr( "vkCmdClearColorImage") : instance.getProcAddr( "vkCmdClearColorImage"));
-      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage(device ? device.getProcAddr( "vkCmdClearDepthStencilImage") : instance.getProcAddr( "vkCmdClearDepthStencilImage"));
-      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV(device ? device.getProcAddr( "vkCmdCopyAccelerationStructureNV") : instance.getProcAddr( "vkCmdCopyAccelerationStructureNV"));
-      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer(device ? device.getProcAddr( "vkCmdCopyBuffer") : instance.getProcAddr( "vkCmdCopyBuffer"));
-      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage(device ? device.getProcAddr( "vkCmdCopyBufferToImage") : instance.getProcAddr( "vkCmdCopyBufferToImage"));
-      vkCmdCopyImage = PFN_vkCmdCopyImage(device ? device.getProcAddr( "vkCmdCopyImage") : instance.getProcAddr( "vkCmdCopyImage"));
-      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer(device ? device.getProcAddr( "vkCmdCopyImageToBuffer") : instance.getProcAddr( "vkCmdCopyImageToBuffer"));
-      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults(device ? device.getProcAddr( "vkCmdCopyQueryPoolResults") : instance.getProcAddr( "vkCmdCopyQueryPoolResults"));
-      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT(device ? device.getProcAddr( "vkCmdDebugMarkerBeginEXT") : instance.getProcAddr( "vkCmdDebugMarkerBeginEXT"));
-      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT(device ? device.getProcAddr( "vkCmdDebugMarkerEndEXT") : instance.getProcAddr( "vkCmdDebugMarkerEndEXT"));
-      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT(device ? device.getProcAddr( "vkCmdDebugMarkerInsertEXT") : instance.getProcAddr( "vkCmdDebugMarkerInsertEXT"));
-      vkCmdDispatch = PFN_vkCmdDispatch(device ? device.getProcAddr( "vkCmdDispatch") : instance.getProcAddr( "vkCmdDispatch"));
-      vkCmdDispatchBase = PFN_vkCmdDispatchBase(device ? device.getProcAddr( "vkCmdDispatchBase") : instance.getProcAddr( "vkCmdDispatchBase"));
-      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR(device ? device.getProcAddr( "vkCmdDispatchBaseKHR") : instance.getProcAddr( "vkCmdDispatchBaseKHR"));
-      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect(device ? device.getProcAddr( "vkCmdDispatchIndirect") : instance.getProcAddr( "vkCmdDispatchIndirect"));
-      vkCmdDraw = PFN_vkCmdDraw(device ? device.getProcAddr( "vkCmdDraw") : instance.getProcAddr( "vkCmdDraw"));
-      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed(device ? device.getProcAddr( "vkCmdDrawIndexed") : instance.getProcAddr( "vkCmdDrawIndexed"));
-      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect(device ? device.getProcAddr( "vkCmdDrawIndexedIndirect") : instance.getProcAddr( "vkCmdDrawIndexedIndirect"));
-      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD(device ? device.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD"));
-      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR(device ? device.getProcAddr( "vkCmdDrawIndexedIndirectCountKHR") : instance.getProcAddr( "vkCmdDrawIndexedIndirectCountKHR"));
-      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect(device ? device.getProcAddr( "vkCmdDrawIndirect") : instance.getProcAddr( "vkCmdDrawIndirect"));
-      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT(device ? device.getProcAddr( "vkCmdDrawIndirectByteCountEXT") : instance.getProcAddr( "vkCmdDrawIndirectByteCountEXT"));
-      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD(device ? device.getProcAddr( "vkCmdDrawIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndirectCountAMD"));
-      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR(device ? device.getProcAddr( "vkCmdDrawIndirectCountKHR") : instance.getProcAddr( "vkCmdDrawIndirectCountKHR"));
-      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV(device ? device.getProcAddr( "vkCmdDrawMeshTasksIndirectCountNV") : instance.getProcAddr( "vkCmdDrawMeshTasksIndirectCountNV"));
-      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV(device ? device.getProcAddr( "vkCmdDrawMeshTasksIndirectNV") : instance.getProcAddr( "vkCmdDrawMeshTasksIndirectNV"));
-      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV(device ? device.getProcAddr( "vkCmdDrawMeshTasksNV") : instance.getProcAddr( "vkCmdDrawMeshTasksNV"));
-      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT(device ? device.getProcAddr( "vkCmdEndConditionalRenderingEXT") : instance.getProcAddr( "vkCmdEndConditionalRenderingEXT"));
-      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdEndDebugUtilsLabelEXT"));
-      vkCmdEndQuery = PFN_vkCmdEndQuery(device ? device.getProcAddr( "vkCmdEndQuery") : instance.getProcAddr( "vkCmdEndQuery"));
-      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT(device ? device.getProcAddr( "vkCmdEndQueryIndexedEXT") : instance.getProcAddr( "vkCmdEndQueryIndexedEXT"));
-      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass(device ? device.getProcAddr( "vkCmdEndRenderPass") : instance.getProcAddr( "vkCmdEndRenderPass"));
-      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR(device ? device.getProcAddr( "vkCmdEndRenderPass2KHR") : instance.getProcAddr( "vkCmdEndRenderPass2KHR"));
-      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT(device ? device.getProcAddr( "vkCmdEndTransformFeedbackEXT") : instance.getProcAddr( "vkCmdEndTransformFeedbackEXT"));
-      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands(device ? device.getProcAddr( "vkCmdExecuteCommands") : instance.getProcAddr( "vkCmdExecuteCommands"));
-      vkCmdFillBuffer = PFN_vkCmdFillBuffer(device ? device.getProcAddr( "vkCmdFillBuffer") : instance.getProcAddr( "vkCmdFillBuffer"));
-      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT"));
-      vkCmdNextSubpass = PFN_vkCmdNextSubpass(device ? device.getProcAddr( "vkCmdNextSubpass") : instance.getProcAddr( "vkCmdNextSubpass"));
-      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR(device ? device.getProcAddr( "vkCmdNextSubpass2KHR") : instance.getProcAddr( "vkCmdNextSubpass2KHR"));
-      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier(device ? device.getProcAddr( "vkCmdPipelineBarrier") : instance.getProcAddr( "vkCmdPipelineBarrier"));
-      vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX(device ? device.getProcAddr( "vkCmdProcessCommandsNVX") : instance.getProcAddr( "vkCmdProcessCommandsNVX"));
-      vkCmdPushConstants = PFN_vkCmdPushConstants(device ? device.getProcAddr( "vkCmdPushConstants") : instance.getProcAddr( "vkCmdPushConstants"));
-      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR(device ? device.getProcAddr( "vkCmdPushDescriptorSetKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetKHR"));
-      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(device ? device.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR"));
-      vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX(device ? device.getProcAddr( "vkCmdReserveSpaceForCommandsNVX") : instance.getProcAddr( "vkCmdReserveSpaceForCommandsNVX"));
-      vkCmdResetEvent = PFN_vkCmdResetEvent(device ? device.getProcAddr( "vkCmdResetEvent") : instance.getProcAddr( "vkCmdResetEvent"));
-      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool(device ? device.getProcAddr( "vkCmdResetQueryPool") : instance.getProcAddr( "vkCmdResetQueryPool"));
-      vkCmdResolveImage = PFN_vkCmdResolveImage(device ? device.getProcAddr( "vkCmdResolveImage") : instance.getProcAddr( "vkCmdResolveImage"));
-      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants(device ? device.getProcAddr( "vkCmdSetBlendConstants") : instance.getProcAddr( "vkCmdSetBlendConstants"));
-      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV(device ? device.getProcAddr( "vkCmdSetCheckpointNV") : instance.getProcAddr( "vkCmdSetCheckpointNV"));
-      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV(device ? device.getProcAddr( "vkCmdSetCoarseSampleOrderNV") : instance.getProcAddr( "vkCmdSetCoarseSampleOrderNV"));
-      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias(device ? device.getProcAddr( "vkCmdSetDepthBias") : instance.getProcAddr( "vkCmdSetDepthBias"));
-      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds(device ? device.getProcAddr( "vkCmdSetDepthBounds") : instance.getProcAddr( "vkCmdSetDepthBounds"));
-      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask(device ? device.getProcAddr( "vkCmdSetDeviceMask") : instance.getProcAddr( "vkCmdSetDeviceMask"));
-      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR(device ? device.getProcAddr( "vkCmdSetDeviceMaskKHR") : instance.getProcAddr( "vkCmdSetDeviceMaskKHR"));
-      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT(device ? device.getProcAddr( "vkCmdSetDiscardRectangleEXT") : instance.getProcAddr( "vkCmdSetDiscardRectangleEXT"));
-      vkCmdSetEvent = PFN_vkCmdSetEvent(device ? device.getProcAddr( "vkCmdSetEvent") : instance.getProcAddr( "vkCmdSetEvent"));
-      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV(device ? device.getProcAddr( "vkCmdSetExclusiveScissorNV") : instance.getProcAddr( "vkCmdSetExclusiveScissorNV"));
-      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth(device ? device.getProcAddr( "vkCmdSetLineWidth") : instance.getProcAddr( "vkCmdSetLineWidth"));
-      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT(device ? device.getProcAddr( "vkCmdSetSampleLocationsEXT") : instance.getProcAddr( "vkCmdSetSampleLocationsEXT"));
-      vkCmdSetScissor = PFN_vkCmdSetScissor(device ? device.getProcAddr( "vkCmdSetScissor") : instance.getProcAddr( "vkCmdSetScissor"));
-      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask(device ? device.getProcAddr( "vkCmdSetStencilCompareMask") : instance.getProcAddr( "vkCmdSetStencilCompareMask"));
-      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference(device ? device.getProcAddr( "vkCmdSetStencilReference") : instance.getProcAddr( "vkCmdSetStencilReference"));
-      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask(device ? device.getProcAddr( "vkCmdSetStencilWriteMask") : instance.getProcAddr( "vkCmdSetStencilWriteMask"));
-      vkCmdSetViewport = PFN_vkCmdSetViewport(device ? device.getProcAddr( "vkCmdSetViewport") : instance.getProcAddr( "vkCmdSetViewport"));
-      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(device ? device.getProcAddr( "vkCmdSetViewportShadingRatePaletteNV") : instance.getProcAddr( "vkCmdSetViewportShadingRatePaletteNV"));
-      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV(device ? device.getProcAddr( "vkCmdSetViewportWScalingNV") : instance.getProcAddr( "vkCmdSetViewportWScalingNV"));
-      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV(device ? device.getProcAddr( "vkCmdTraceRaysNV") : instance.getProcAddr( "vkCmdTraceRaysNV"));
-      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer(device ? device.getProcAddr( "vkCmdUpdateBuffer") : instance.getProcAddr( "vkCmdUpdateBuffer"));
-      vkCmdWaitEvents = PFN_vkCmdWaitEvents(device ? device.getProcAddr( "vkCmdWaitEvents") : instance.getProcAddr( "vkCmdWaitEvents"));
-      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(device ? device.getProcAddr( "vkCmdWriteAccelerationStructuresPropertiesNV") : instance.getProcAddr( "vkCmdWriteAccelerationStructuresPropertiesNV"));
-      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD(device ? device.getProcAddr( "vkCmdWriteBufferMarkerAMD") : instance.getProcAddr( "vkCmdWriteBufferMarkerAMD"));
-      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp(device ? device.getProcAddr( "vkCmdWriteTimestamp") : instance.getProcAddr( "vkCmdWriteTimestamp"));
-      vkCompileDeferredNV = PFN_vkCompileDeferredNV(device ? device.getProcAddr( "vkCompileDeferredNV") : instance.getProcAddr( "vkCompileDeferredNV"));
-      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV(device ? device.getProcAddr( "vkCreateAccelerationStructureNV") : instance.getProcAddr( "vkCreateAccelerationStructureNV"));
+      VkInstance instance = static_cast<VkInstance>(instanceCpp);
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
 #ifdef VK_USE_PLATFORM_ANDROID_KHR
-      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR(instance.getProcAddr( "vkCreateAndroidSurfaceKHR"));
+      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-      vkCreateBuffer = PFN_vkCreateBuffer(device ? device.getProcAddr( "vkCreateBuffer") : instance.getProcAddr( "vkCreateBuffer"));
-      vkCreateBufferView = PFN_vkCreateBufferView(device ? device.getProcAddr( "vkCreateBufferView") : instance.getProcAddr( "vkCreateBufferView"));
-      vkCreateCommandPool = PFN_vkCreateCommandPool(device ? device.getProcAddr( "vkCreateCommandPool") : instance.getProcAddr( "vkCreateCommandPool"));
-      vkCreateComputePipelines = PFN_vkCreateComputePipelines(device ? device.getProcAddr( "vkCreateComputePipelines") : instance.getProcAddr( "vkCreateComputePipelines"));
-      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT(instance.getProcAddr( "vkCreateDebugReportCallbackEXT"));
-      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT(instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT"));
-      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool(device ? device.getProcAddr( "vkCreateDescriptorPool") : instance.getProcAddr( "vkCreateDescriptorPool"));
-      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout(device ? device.getProcAddr( "vkCreateDescriptorSetLayout") : instance.getProcAddr( "vkCreateDescriptorSetLayout"));
-      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate(device ? device.getProcAddr( "vkCreateDescriptorUpdateTemplate") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplate"));
-      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(device ? device.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR"));
-      vkCreateDevice = PFN_vkCreateDevice(instance.getProcAddr( "vkCreateDevice"));
-      vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR(instance.getProcAddr( "vkCreateDisplayModeKHR"));
-      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR(instance.getProcAddr( "vkCreateDisplayPlaneSurfaceKHR"));
-      vkCreateEvent = PFN_vkCreateEvent(device ? device.getProcAddr( "vkCreateEvent") : instance.getProcAddr( "vkCreateEvent"));
-      vkCreateFence = PFN_vkCreateFence(device ? device.getProcAddr( "vkCreateFence") : instance.getProcAddr( "vkCreateFence"));
-      vkCreateFramebuffer = PFN_vkCreateFramebuffer(device ? device.getProcAddr( "vkCreateFramebuffer") : instance.getProcAddr( "vkCreateFramebuffer"));
-      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines(device ? device.getProcAddr( "vkCreateGraphicsPipelines") : instance.getProcAddr( "vkCreateGraphicsPipelines"));
+      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+      vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+      vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+      vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
 #ifdef VK_USE_PLATFORM_IOS_MVK
-      vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK(instance.getProcAddr( "vkCreateIOSSurfaceMVK"));
+      vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
 #endif /*VK_USE_PLATFORM_IOS_MVK*/
-      vkCreateImage = PFN_vkCreateImage(device ? device.getProcAddr( "vkCreateImage") : instance.getProcAddr( "vkCreateImage"));
-#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
-      vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA(instance.getProcAddr( "vkCreateImagePipeSurfaceFUCHSIA"));
-#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
-      vkCreateImageView = PFN_vkCreateImageView(device ? device.getProcAddr( "vkCreateImageView") : instance.getProcAddr( "vkCreateImageView"));
-      vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkCreateIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkCreateIndirectCommandsLayoutNVX"));
-      vkCreateInstance = PFN_vkCreateInstance(instance.getProcAddr( "vkCreateInstance"));
+#ifdef VK_USE_PLATFORM_FUCHSIA
+      vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
 #ifdef VK_USE_PLATFORM_MACOS_MVK
-      vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK(instance.getProcAddr( "vkCreateMacOSSurfaceMVK"));
+      vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
 #endif /*VK_USE_PLATFORM_MACOS_MVK*/
-      vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX(device ? device.getProcAddr( "vkCreateObjectTableNVX") : instance.getProcAddr( "vkCreateObjectTableNVX"));
-      vkCreatePipelineCache = PFN_vkCreatePipelineCache(device ? device.getProcAddr( "vkCreatePipelineCache") : instance.getProcAddr( "vkCreatePipelineCache"));
-      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout(device ? device.getProcAddr( "vkCreatePipelineLayout") : instance.getProcAddr( "vkCreatePipelineLayout"));
-      vkCreateQueryPool = PFN_vkCreateQueryPool(device ? device.getProcAddr( "vkCreateQueryPool") : instance.getProcAddr( "vkCreateQueryPool"));
-      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV(device ? device.getProcAddr( "vkCreateRayTracingPipelinesNV") : instance.getProcAddr( "vkCreateRayTracingPipelinesNV"));
-      vkCreateRenderPass = PFN_vkCreateRenderPass(device ? device.getProcAddr( "vkCreateRenderPass") : instance.getProcAddr( "vkCreateRenderPass"));
-      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR(device ? device.getProcAddr( "vkCreateRenderPass2KHR") : instance.getProcAddr( "vkCreateRenderPass2KHR"));
-      vkCreateSampler = PFN_vkCreateSampler(device ? device.getProcAddr( "vkCreateSampler") : instance.getProcAddr( "vkCreateSampler"));
-      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion(device ? device.getProcAddr( "vkCreateSamplerYcbcrConversion") : instance.getProcAddr( "vkCreateSamplerYcbcrConversion"));
-      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkCreateSamplerYcbcrConversionKHR") : instance.getProcAddr( "vkCreateSamplerYcbcrConversionKHR"));
-      vkCreateSemaphore = PFN_vkCreateSemaphore(device ? device.getProcAddr( "vkCreateSemaphore") : instance.getProcAddr( "vkCreateSemaphore"));
-      vkCreateShaderModule = PFN_vkCreateShaderModule(device ? device.getProcAddr( "vkCreateShaderModule") : instance.getProcAddr( "vkCreateShaderModule"));
-      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR(device ? device.getProcAddr( "vkCreateSharedSwapchainsKHR") : instance.getProcAddr( "vkCreateSharedSwapchainsKHR"));
-      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR(device ? device.getProcAddr( "vkCreateSwapchainKHR") : instance.getProcAddr( "vkCreateSwapchainKHR"));
-      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT(device ? device.getProcAddr( "vkCreateValidationCacheEXT") : instance.getProcAddr( "vkCreateValidationCacheEXT"));
+#ifdef VK_USE_PLATFORM_METAL_EXT
+      vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+#ifdef VK_USE_PLATFORM_GGP
+      vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+#endif /*VK_USE_PLATFORM_GGP*/
 #ifdef VK_USE_PLATFORM_VI_NN
-      vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN(instance.getProcAddr( "vkCreateViSurfaceNN"));
+      vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
 #endif /*VK_USE_PLATFORM_VI_NN*/
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-      vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR(instance.getProcAddr( "vkCreateWaylandSurfaceKHR"));
+      vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR(instance.getProcAddr( "vkCreateWin32SurfaceKHR"));
+      vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 #ifdef VK_USE_PLATFORM_XCB_KHR
-      vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR(instance.getProcAddr( "vkCreateXcbSurfaceKHR"));
+      vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
 #ifdef VK_USE_PLATFORM_XLIB_KHR
-      vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR(instance.getProcAddr( "vkCreateXlibSurfaceKHR"));
+      vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
-      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectNameEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectNameEXT"));
-      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectTagEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectTagEXT"));
-      vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT(instance.getProcAddr( "vkDebugReportMessageEXT"));
-      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV(device ? device.getProcAddr( "vkDestroyAccelerationStructureNV") : instance.getProcAddr( "vkDestroyAccelerationStructureNV"));
-      vkDestroyBuffer = PFN_vkDestroyBuffer(device ? device.getProcAddr( "vkDestroyBuffer") : instance.getProcAddr( "vkDestroyBuffer"));
-      vkDestroyBufferView = PFN_vkDestroyBufferView(device ? device.getProcAddr( "vkDestroyBufferView") : instance.getProcAddr( "vkDestroyBufferView"));
-      vkDestroyCommandPool = PFN_vkDestroyCommandPool(device ? device.getProcAddr( "vkDestroyCommandPool") : instance.getProcAddr( "vkDestroyCommandPool"));
-      vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT(instance.getProcAddr( "vkDestroyDebugReportCallbackEXT"));
-      vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT(instance.getProcAddr( "vkDestroyDebugUtilsMessengerEXT"));
-      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool(device ? device.getProcAddr( "vkDestroyDescriptorPool") : instance.getProcAddr( "vkDestroyDescriptorPool"));
-      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout(device ? device.getProcAddr( "vkDestroyDescriptorSetLayout") : instance.getProcAddr( "vkDestroyDescriptorSetLayout"));
-      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplate") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplate"));
-      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR"));
-      vkDestroyDevice = PFN_vkDestroyDevice(device ? device.getProcAddr( "vkDestroyDevice") : instance.getProcAddr( "vkDestroyDevice"));
-      vkDestroyEvent = PFN_vkDestroyEvent(device ? device.getProcAddr( "vkDestroyEvent") : instance.getProcAddr( "vkDestroyEvent"));
-      vkDestroyFence = PFN_vkDestroyFence(device ? device.getProcAddr( "vkDestroyFence") : instance.getProcAddr( "vkDestroyFence"));
-      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer(device ? device.getProcAddr( "vkDestroyFramebuffer") : instance.getProcAddr( "vkDestroyFramebuffer"));
-      vkDestroyImage = PFN_vkDestroyImage(device ? device.getProcAddr( "vkDestroyImage") : instance.getProcAddr( "vkDestroyImage"));
-      vkDestroyImageView = PFN_vkDestroyImageView(device ? device.getProcAddr( "vkDestroyImageView") : instance.getProcAddr( "vkDestroyImageView"));
-      vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX"));
-      vkDestroyInstance = PFN_vkDestroyInstance(instance.getProcAddr( "vkDestroyInstance"));
-      vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX(device ? device.getProcAddr( "vkDestroyObjectTableNVX") : instance.getProcAddr( "vkDestroyObjectTableNVX"));
-      vkDestroyPipeline = PFN_vkDestroyPipeline(device ? device.getProcAddr( "vkDestroyPipeline") : instance.getProcAddr( "vkDestroyPipeline"));
-      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache(device ? device.getProcAddr( "vkDestroyPipelineCache") : instance.getProcAddr( "vkDestroyPipelineCache"));
-      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout(device ? device.getProcAddr( "vkDestroyPipelineLayout") : instance.getProcAddr( "vkDestroyPipelineLayout"));
-      vkDestroyQueryPool = PFN_vkDestroyQueryPool(device ? device.getProcAddr( "vkDestroyQueryPool") : instance.getProcAddr( "vkDestroyQueryPool"));
-      vkDestroyRenderPass = PFN_vkDestroyRenderPass(device ? device.getProcAddr( "vkDestroyRenderPass") : instance.getProcAddr( "vkDestroyRenderPass"));
-      vkDestroySampler = PFN_vkDestroySampler(device ? device.getProcAddr( "vkDestroySampler") : instance.getProcAddr( "vkDestroySampler"));
-      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversion") : instance.getProcAddr( "vkDestroySamplerYcbcrConversion"));
-      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversionKHR") : instance.getProcAddr( "vkDestroySamplerYcbcrConversionKHR"));
-      vkDestroySemaphore = PFN_vkDestroySemaphore(device ? device.getProcAddr( "vkDestroySemaphore") : instance.getProcAddr( "vkDestroySemaphore"));
-      vkDestroyShaderModule = PFN_vkDestroyShaderModule(device ? device.getProcAddr( "vkDestroyShaderModule") : instance.getProcAddr( "vkDestroyShaderModule"));
-      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR(instance.getProcAddr( "vkDestroySurfaceKHR"));
-      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR(device ? device.getProcAddr( "vkDestroySwapchainKHR") : instance.getProcAddr( "vkDestroySwapchainKHR"));
-      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT(device ? device.getProcAddr( "vkDestroyValidationCacheEXT") : instance.getProcAddr( "vkDestroyValidationCacheEXT"));
-      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle(device ? device.getProcAddr( "vkDeviceWaitIdle") : instance.getProcAddr( "vkDeviceWaitIdle"));
-      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT(device ? device.getProcAddr( "vkDisplayPowerControlEXT") : instance.getProcAddr( "vkDisplayPowerControlEXT"));
-      vkEndCommandBuffer = PFN_vkEndCommandBuffer(device ? device.getProcAddr( "vkEndCommandBuffer") : instance.getProcAddr( "vkEndCommandBuffer"));
-      vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties(instance.getProcAddr( "vkEnumerateDeviceExtensionProperties"));
-      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties(instance.getProcAddr( "vkEnumerateDeviceLayerProperties"));
-      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties(instance.getProcAddr( "vkEnumerateInstanceExtensionProperties"));
-      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties(instance.getProcAddr( "vkEnumerateInstanceLayerProperties"));
-      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion(instance.getProcAddr( "vkEnumerateInstanceVersion"));
-      vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroups"));
-      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroupsKHR"));
-      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices(instance.getProcAddr( "vkEnumeratePhysicalDevices"));
-      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges(device ? device.getProcAddr( "vkFlushMappedMemoryRanges") : instance.getProcAddr( "vkFlushMappedMemoryRanges"));
-      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers(device ? device.getProcAddr( "vkFreeCommandBuffers") : instance.getProcAddr( "vkFreeCommandBuffers"));
-      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets(device ? device.getProcAddr( "vkFreeDescriptorSets") : instance.getProcAddr( "vkFreeDescriptorSets"));
-      vkFreeMemory = PFN_vkFreeMemory(device ? device.getProcAddr( "vkFreeMemory") : instance.getProcAddr( "vkFreeMemory"));
-      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV(device ? device.getProcAddr( "vkGetAccelerationStructureHandleNV") : instance.getProcAddr( "vkGetAccelerationStructureHandleNV"));
-      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(device ? device.getProcAddr( "vkGetAccelerationStructureMemoryRequirementsNV") : instance.getProcAddr( "vkGetAccelerationStructureMemoryRequirementsNV"));
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(device ? device.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID") : instance.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID"));
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements(device ? device.getProcAddr( "vkGetBufferMemoryRequirements") : instance.getProcAddr( "vkGetBufferMemoryRequirements"));
-      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2(device ? device.getProcAddr( "vkGetBufferMemoryRequirements2") : instance.getProcAddr( "vkGetBufferMemoryRequirements2"));
-      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetBufferMemoryRequirements2KHR") : instance.getProcAddr( "vkGetBufferMemoryRequirements2KHR"));
-      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT(device ? device.getProcAddr( "vkGetCalibratedTimestampsEXT") : instance.getProcAddr( "vkGetCalibratedTimestampsEXT"));
-      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupport") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupport"));
-      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR"));
-      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures"));
-      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR"));
-      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR") : instance.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR"));
-      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(device ? device.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR") : instance.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR"));
-      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment(device ? device.getProcAddr( "vkGetDeviceMemoryCommitment") : instance.getProcAddr( "vkGetDeviceMemoryCommitment"));
-      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr(device ? device.getProcAddr( "vkGetDeviceProcAddr") : instance.getProcAddr( "vkGetDeviceProcAddr"));
-      vkGetDeviceQueue = PFN_vkGetDeviceQueue(device ? device.getProcAddr( "vkGetDeviceQueue") : instance.getProcAddr( "vkGetDeviceQueue"));
-      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2(device ? device.getProcAddr( "vkGetDeviceQueue2") : instance.getProcAddr( "vkGetDeviceQueue2"));
-      vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR(instance.getProcAddr( "vkGetDisplayModeProperties2KHR"));
-      vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR(instance.getProcAddr( "vkGetDisplayModePropertiesKHR"));
-      vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR(instance.getProcAddr( "vkGetDisplayPlaneCapabilities2KHR"));
-      vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR(instance.getProcAddr( "vkGetDisplayPlaneCapabilitiesKHR"));
-      vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR(instance.getProcAddr( "vkGetDisplayPlaneSupportedDisplaysKHR"));
-      vkGetEventStatus = PFN_vkGetEventStatus(device ? device.getProcAddr( "vkGetEventStatus") : instance.getProcAddr( "vkGetEventStatus"));
-      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR(device ? device.getProcAddr( "vkGetFenceFdKHR") : instance.getProcAddr( "vkGetFenceFdKHR"));
-      vkGetFenceStatus = PFN_vkGetFenceStatus(device ? device.getProcAddr( "vkGetFenceStatus") : instance.getProcAddr( "vkGetFenceStatus"));
+      vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+      vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+      vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
+      vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
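+      // Promoted commands below are loaded under both their KHR and core names; when
+      // the core entry point is not exposed (e.g. on a Vulkan 1.0 instance), the core
+      // pointer is aliased to the extension variant so callers can use either name.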
+      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+      vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+      if ( !vkEnumeratePhysicalDeviceGroups ) vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
+      vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+      vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+      vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+      vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+      vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+      vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+      vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr( vkGetInstanceProcAddr( instance, "vkGetInstanceProcAddr" ) );
+      vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+      vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+      vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+      vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+      vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+      if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
+      vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+      if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
+      vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+      vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+      if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
+      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+      vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+      vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+      if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
+      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+      vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+      if ( !vkGetPhysicalDeviceFormatProperties2 ) vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
+      vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
+      vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+      vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+      if ( !vkGetPhysicalDeviceImageFormatProperties2 ) vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
+      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+      vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+      vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+      if ( !vkGetPhysicalDeviceMemoryProperties2 ) vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
+      vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+      vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+      vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+      vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+      if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
+      vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+      if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+      vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+      if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
+      vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+      vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+      vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+      vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+      vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+      vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR(device ? device.getProcAddr( "vkGetFenceWin32HandleKHR") : instance.getProcAddr( "vkGetFenceWin32HandleKHR"));
+      vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(device ? device.getProcAddr( "vkGetImageDrmFormatModifierPropertiesEXT") : instance.getProcAddr( "vkGetImageDrmFormatModifierPropertiesEXT"));
-      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements(device ? device.getProcAddr( "vkGetImageMemoryRequirements") : instance.getProcAddr( "vkGetImageMemoryRequirements"));
-      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2(device ? device.getProcAddr( "vkGetImageMemoryRequirements2") : instance.getProcAddr( "vkGetImageMemoryRequirements2"));
-      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageMemoryRequirements2KHR"));
-      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements"));
-      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2"));
-      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR"));
-      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout(device ? device.getProcAddr( "vkGetImageSubresourceLayout") : instance.getProcAddr( "vkGetImageSubresourceLayout"));
-      vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr(instance.getProcAddr( "vkGetInstanceProcAddr"));
-#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
-      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(device ? device.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID") : instance.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID"));
-#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
-      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR(device ? device.getProcAddr( "vkGetMemoryFdKHR") : instance.getProcAddr( "vkGetMemoryFdKHR"));
-      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR(device ? device.getProcAddr( "vkGetMemoryFdPropertiesKHR") : instance.getProcAddr( "vkGetMemoryFdPropertiesKHR"));
-      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(device ? device.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT") : instance.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT"));
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandleKHR") : instance.getProcAddr( "vkGetMemoryWin32HandleKHR"));
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_NV
-      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV(device ? device.getProcAddr( "vkGetMemoryWin32HandleNV") : instance.getProcAddr( "vkGetMemoryWin32HandleNV"));
-#endif /*VK_USE_PLATFORM_WIN32_NV*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR") : instance.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR"));
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE(device ? device.getProcAddr( "vkGetPastPresentationTimingGOOGLE") : instance.getProcAddr( "vkGetPastPresentationTimingGOOGLE"));
-      vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(instance.getProcAddr( "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT"));
-      vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayPlaneProperties2KHR"));
-      vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayPlanePropertiesKHR"));
-      vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayProperties2KHR"));
-      vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayPropertiesKHR"));
-      vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties(instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferProperties"));
-      vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferPropertiesKHR"));
-      vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties(instance.getProcAddr( "vkGetPhysicalDeviceExternalFenceProperties"));
-      vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceExternalFencePropertiesKHR"));
-      vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(instance.getProcAddr( "vkGetPhysicalDeviceExternalImageFormatPropertiesNV"));
-      vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties(instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphoreProperties"));
-      vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"));
-      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures(instance.getProcAddr( "vkGetPhysicalDeviceFeatures"));
-      vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2(instance.getProcAddr( "vkGetPhysicalDeviceFeatures2"));
-      vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR(instance.getProcAddr( "vkGetPhysicalDeviceFeatures2KHR"));
-      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties(instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties"));
-      vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2(instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2"));
-      vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2KHR"));
-      vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(instance.getProcAddr( "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX"));
-      vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties(instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties"));
-      vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2(instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2"));
-      vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2KHR"));
-      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties(instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties"));
-      vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2(instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2"));
-      vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2KHR"));
-      vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT(instance.getProcAddr( "vkGetPhysicalDeviceMultisamplePropertiesEXT"));
-      vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR(instance.getProcAddr( "vkGetPhysicalDevicePresentRectanglesKHR"));
-      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties(instance.getProcAddr( "vkGetPhysicalDeviceProperties"));
-      vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2(instance.getProcAddr( "vkGetPhysicalDeviceProperties2"));
-      vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceProperties2KHR"));
-      vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties(instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties"));
-      vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2(instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2"));
-      vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2KHR"));
-      vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties(instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties"));
-      vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2(instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2"));
-      vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2KHR"));
-      vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2EXT"));
-      vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2KHR"));
-      vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"));
-      vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormats2KHR"));
-      vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormatsKHR"));
-      vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfacePresentModesKHR"));
-      vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceSupportKHR"));
+      vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+      vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+      vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-      vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceWaylandPresentationSupportKHR"));
+      vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceWin32PresentationSupportKHR"));
+      vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 #ifdef VK_USE_PLATFORM_XCB_KHR
-      vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceXcbPresentationSupportKHR"));
+      vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
 #ifdef VK_USE_PLATFORM_XLIB_KHR
-      vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceXlibPresentationSupportKHR"));
+      vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
 #endif /*VK_USE_PLATFORM_XLIB_KHR*/
-      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData(device ? device.getProcAddr( "vkGetPipelineCacheData") : instance.getProcAddr( "vkGetPipelineCacheData"));
-      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults(device ? device.getProcAddr( "vkGetQueryPoolResults") : instance.getProcAddr( "vkGetQueryPoolResults"));
-      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV(device ? device.getProcAddr( "vkGetQueueCheckpointDataNV") : instance.getProcAddr( "vkGetQueueCheckpointDataNV"));
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
-      vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT(instance.getProcAddr( "vkGetRandROutputDisplayEXT"));
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
-      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV(device ? device.getProcAddr( "vkGetRayTracingShaderGroupHandlesNV") : instance.getProcAddr( "vkGetRayTracingShaderGroupHandlesNV"));
-      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE(device ? device.getProcAddr( "vkGetRefreshCycleDurationGOOGLE") : instance.getProcAddr( "vkGetRefreshCycleDurationGOOGLE"));
-      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity(device ? device.getProcAddr( "vkGetRenderAreaGranularity") : instance.getProcAddr( "vkGetRenderAreaGranularity"));
-      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR(device ? device.getProcAddr( "vkGetSemaphoreFdKHR") : instance.getProcAddr( "vkGetSemaphoreFdKHR"));
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+      vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
+      vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR(device ? device.getProcAddr( "vkGetSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkGetSemaphoreWin32HandleKHR"));
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD(device ? device.getProcAddr( "vkGetShaderInfoAMD") : instance.getProcAddr( "vkGetShaderInfoAMD"));
-      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT(device ? device.getProcAddr( "vkGetSwapchainCounterEXT") : instance.getProcAddr( "vkGetSwapchainCounterEXT"));
-      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR(device ? device.getProcAddr( "vkGetSwapchainImagesKHR") : instance.getProcAddr( "vkGetSwapchainImagesKHR"));
-      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR(device ? device.getProcAddr( "vkGetSwapchainStatusKHR") : instance.getProcAddr( "vkGetSwapchainStatusKHR"));
-      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT(device ? device.getProcAddr( "vkGetValidationCacheDataEXT") : instance.getProcAddr( "vkGetValidationCacheDataEXT"));
-      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR(device ? device.getProcAddr( "vkImportFenceFdKHR") : instance.getProcAddr( "vkImportFenceFdKHR"));
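+      // Device-level commands are also resolved through vkGetInstanceProcAddr in this
+      // init path; the returned pointers go through the loader's dispatch trampolines.
+      // The dispatcher can typically be re-initialized with a VkDevice later to obtain
+      // direct, lower-overhead device entry points.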
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
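+      // The KHR ray-tracing / acceleration-structure entry points were still provisional
+      // when this header was generated, hence the VK_ENABLE_BETA_EXTENSIONS guards; where
+      // an NV equivalent exists it is used as a fallback alias.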
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryKHR" ) );
+      if ( !vkBindAccelerationStructureMemoryKHR ) vkBindAccelerationStructureMemoryKHR = vkBindAccelerationStructureMemoryNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
+      if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
+      if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
+      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
+      if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+      vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+      vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+      vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+      if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
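+      // vkCmdDraw(Indexed)IndirectCount has two extension spellings; the core pointer is
+      // filled from the KHR variant first and only then from the older AMD variant, so
+      // the most recent available implementation wins.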
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+      vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
+      if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+      vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
+      vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
+      if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
+      vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+      vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
+      vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
+      vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+      if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+      vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
+      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+      vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+      vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+      vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      if ( !vkCmdWriteAccelerationStructuresPropertiesKHR ) vkCmdWriteAccelerationStructuresPropertiesKHR = vkCmdWriteAccelerationStructuresPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+      if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
+      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
+      if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
+      vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+      if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
+      if ( !vkDestroyAccelerationStructureKHR ) vkDestroyAccelerationStructureKHR = vkDestroyAccelerationStructureNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
+      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+      if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
+      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+      if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+      if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR(device ? device.getProcAddr( "vkImportFenceWin32HandleKHR") : instance.getProcAddr( "vkImportFenceWin32HandleKHR"));
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR(device ? device.getProcAddr( "vkImportSemaphoreFdKHR") : instance.getProcAddr( "vkImportSemaphoreFdKHR"));
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR(device ? device.getProcAddr( "vkImportSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkImportSemaphoreWin32HandleKHR"));
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges(device ? device.getProcAddr( "vkInvalidateMappedMemoryRanges") : instance.getProcAddr( "vkInvalidateMappedMemoryRanges"));
-      vkMapMemory = PFN_vkMapMemory(device ? device.getProcAddr( "vkMapMemory") : instance.getProcAddr( "vkMapMemory"));
-      vkMergePipelineCaches = PFN_vkMergePipelineCaches(device ? device.getProcAddr( "vkMergePipelineCaches") : instance.getProcAddr( "vkMergePipelineCaches"));
-      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT(device ? device.getProcAddr( "vkMergeValidationCachesEXT") : instance.getProcAddr( "vkMergeValidationCachesEXT"));
-      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT"));
-      vkQueueBindSparse = PFN_vkQueueBindSparse(device ? device.getProcAddr( "vkQueueBindSparse") : instance.getProcAddr( "vkQueueBindSparse"));
-      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueEndDebugUtilsLabelEXT"));
-      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT"));
-      vkQueuePresentKHR = PFN_vkQueuePresentKHR(device ? device.getProcAddr( "vkQueuePresentKHR") : instance.getProcAddr( "vkQueuePresentKHR"));
-      vkQueueSubmit = PFN_vkQueueSubmit(device ? device.getProcAddr( "vkQueueSubmit") : instance.getProcAddr( "vkQueueSubmit"));
-      vkQueueWaitIdle = PFN_vkQueueWaitIdle(device ? device.getProcAddr( "vkQueueWaitIdle") : instance.getProcAddr( "vkQueueWaitIdle"));
-      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT(device ? device.getProcAddr( "vkRegisterDeviceEventEXT") : instance.getProcAddr( "vkRegisterDeviceEventEXT"));
-      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT(device ? device.getProcAddr( "vkRegisterDisplayEventEXT") : instance.getProcAddr( "vkRegisterDisplayEventEXT"));
-      vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX(device ? device.getProcAddr( "vkRegisterObjectsNVX") : instance.getProcAddr( "vkRegisterObjectsNVX"));
-      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT(instance.getProcAddr( "vkReleaseDisplayEXT"));
-      vkResetCommandBuffer = PFN_vkResetCommandBuffer(device ? device.getProcAddr( "vkResetCommandBuffer") : instance.getProcAddr( "vkResetCommandBuffer"));
-      vkResetCommandPool = PFN_vkResetCommandPool(device ? device.getProcAddr( "vkResetCommandPool") : instance.getProcAddr( "vkResetCommandPool"));
-      vkResetDescriptorPool = PFN_vkResetDescriptorPool(device ? device.getProcAddr( "vkResetDescriptorPool") : instance.getProcAddr( "vkResetDescriptorPool"));
-      vkResetEvent = PFN_vkResetEvent(device ? device.getProcAddr( "vkResetEvent") : instance.getProcAddr( "vkResetEvent"));
-      vkResetFences = PFN_vkResetFences(device ? device.getProcAddr( "vkResetFences") : instance.getProcAddr( "vkResetFences"));
-      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT(device ? device.getProcAddr( "vkSetDebugUtilsObjectNameEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectNameEXT"));
-      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT(device ? device.getProcAddr( "vkSetDebugUtilsObjectTagEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectTagEXT"));
-      vkSetEvent = PFN_vkSetEvent(device ? device.getProcAddr( "vkSetEvent") : instance.getProcAddr( "vkSetEvent"));
-      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT(device ? device.getProcAddr( "vkSetHdrMetadataEXT") : instance.getProcAddr( "vkSetHdrMetadataEXT"));
-      vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT(instance.getProcAddr( "vkSubmitDebugUtilsMessageEXT"));
-      vkTrimCommandPool = PFN_vkTrimCommandPool(device ? device.getProcAddr( "vkTrimCommandPool") : instance.getProcAddr( "vkTrimCommandPool"));
-      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR(device ? device.getProcAddr( "vkTrimCommandPoolKHR") : instance.getProcAddr( "vkTrimCommandPoolKHR"));
-      vkUnmapMemory = PFN_vkUnmapMemory(device ? device.getProcAddr( "vkUnmapMemory") : instance.getProcAddr( "vkUnmapMemory"));
-      vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX(device ? device.getProcAddr( "vkUnregisterObjectsNVX") : instance.getProcAddr( "vkUnregisterObjectsNVX"));
-      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplate") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplate"));
-      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR"));
-      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets(device ? device.getProcAddr( "vkUpdateDescriptorSets") : instance.getProcAddr( "vkUpdateDescriptorSets"));
-      vkWaitForFences = PFN_vkWaitForFences(device ? device.getProcAddr( "vkWaitForFences") : instance.getProcAddr( "vkWaitForFences"));
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+      if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
+      if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
+      vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
+      vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
+      if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
+      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
+      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
+      if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+      if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
+      if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
+      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
+      if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    }
+
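+      // Descriptive note (not part of the generated registry output): the overload
+      // below re-resolves device-level entry points through vkGetDeviceProcAddr, so
+      // calls dispatch directly to the driver rather than through the instance-level
+      // trampoline returned by vkGetInstanceProcAddr.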
+    void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkDevice device = static_cast<VkDevice>(deviceCpp);
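+      // Lookups below use the raw VkDevice handle; entry points for extensions that
+      // were not enabled resolve to nullptr, and the core-version aliases fall back
+      // to their KHR/EXT equivalents via the "if ( !vk... )" checks that follow.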
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryKHR" ) );
+      if ( !vkBindAccelerationStructureMemoryKHR ) vkBindAccelerationStructureMemoryKHR = vkBindAccelerationStructureMemoryNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+      if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+      if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
+      if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+      vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+      vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+      vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+      if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+      vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
+      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
+      if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+      vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
+      vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
+      if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+      vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+      vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+      vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+      vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+      if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+      vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
+      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+      vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+      vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+      vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      if ( !vkCmdWriteAccelerationStructuresPropertiesKHR ) vkCmdWriteAccelerationStructuresPropertiesKHR = vkCmdWriteAccelerationStructuresPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+      if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
+      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+      if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
+      vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+      if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+      if ( !vkDestroyAccelerationStructureKHR ) vkDestroyAccelerationStructureKHR = vkDestroyAccelerationStructureNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+      if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+      if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+      if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+      if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+      if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+      vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
+      vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+      if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
+      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
+      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+      if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+      if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+      if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
+      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+      if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     }
   };
+
 } // namespace VULKAN_HPP_NAMESPACE
 
+namespace std
+{
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const& accelerationStructureKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkAccelerationStructureKHR>{}(static_cast<VkAccelerationStructureKHR>(accelerationStructureKHR));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Buffer>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Buffer const& buffer) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBuffer>{}(static_cast<VkBuffer>(buffer));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferView>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferView const& bufferView) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBufferView>{}(static_cast<VkBufferView>(bufferView));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBuffer>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBuffer const& commandBuffer) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCommandBuffer>{}(static_cast<VkCommandBuffer>(commandBuffer));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandPool>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandPool const& commandPool) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkCommandPool>{}(static_cast<VkCommandPool>(commandPool));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const& debugReportCallbackEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDebugReportCallbackEXT>{}(static_cast<VkDebugReportCallbackEXT>(debugReportCallbackEXT));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const& debugUtilsMessengerEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDebugUtilsMessengerEXT>{}(static_cast<VkDebugUtilsMessengerEXT>(debugUtilsMessengerEXT));
+    }
+  };
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeferredOperationKHR const& deferredOperationKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeferredOperationKHR>{}(static_cast<VkDeferredOperationKHR>(deferredOperationKHR));
+    }
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPool>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPool const& descriptorPool) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorPool>{}(static_cast<VkDescriptorPool>(descriptorPool));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSet>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSet const& descriptorSet) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorSet>{}(static_cast<VkDescriptorSet>(descriptorSet));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayout const& descriptorSetLayout) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorSetLayout>{}(static_cast<VkDescriptorSetLayout>(descriptorSetLayout));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const& descriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorUpdateTemplate>{}(static_cast<VkDescriptorUpdateTemplate>(descriptorUpdateTemplate));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Device>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Device const& device) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDevice>{}(static_cast<VkDevice>(device));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemory const& deviceMemory) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeviceMemory>{}(static_cast<VkDeviceMemory>(deviceMemory));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayKHR const& displayKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayKHR>{}(static_cast<VkDisplayKHR>(displayKHR));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeKHR const& displayModeKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayModeKHR>{}(static_cast<VkDisplayModeKHR>(displayModeKHR));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Event>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Event const& event) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkEvent>{}(static_cast<VkEvent>(event));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Fence>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Fence const& fence) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFence>{}(static_cast<VkFence>(fence));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Framebuffer>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Framebuffer const& framebuffer) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFramebuffer>{}(static_cast<VkFramebuffer>(framebuffer));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Image>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Image const& image) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImage>{}(static_cast<VkImage>(image));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageView>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageView const& imageView) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImageView>{}(static_cast<VkImageView>(imageView));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const& indirectCommandsLayoutNV) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkIndirectCommandsLayoutNV>{}(static_cast<VkIndirectCommandsLayoutNV>(indirectCommandsLayoutNV));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Instance const& instance) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkInstance>{}(static_cast<VkInstance>(instance));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const& performanceConfigurationINTEL) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPerformanceConfigurationINTEL>{}(static_cast<VkPerformanceConfigurationINTEL>(performanceConfigurationINTEL));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice const& physicalDevice) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPhysicalDevice>{}(static_cast<VkPhysicalDevice>(physicalDevice));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Pipeline>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Pipeline const& pipeline) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipeline>{}(static_cast<VkPipeline>(pipeline));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCache const& pipelineCache) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineCache>{}(static_cast<VkPipelineCache>(pipelineCache));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineLayout>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineLayout const& pipelineLayout) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineLayout>{}(static_cast<VkPipelineLayout>(pipelineLayout));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT const& privateDataSlotEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPrivateDataSlotEXT>{}(static_cast<VkPrivateDataSlotEXT>(privateDataSlotEXT));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPool const& queryPool) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueryPool>{}(static_cast<VkQueryPool>(queryPool));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Queue const& queue) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueue>{}(static_cast<VkQueue>(queue));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPass>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPass const& renderPass) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkRenderPass>{}(static_cast<VkRenderPass>(renderPass));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Sampler>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Sampler const& sampler) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSampler>{}(static_cast<VkSampler>(sampler));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const& samplerYcbcrConversion) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSamplerYcbcrConversion>{}(static_cast<VkSamplerYcbcrConversion>(samplerYcbcrConversion));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::Semaphore const& semaphore) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSemaphore>{}(static_cast<VkSemaphore>(semaphore));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModule const& shaderModule) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkShaderModule>{}(static_cast<VkShaderModule>(shaderModule));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceKHR const& surfaceKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSurfaceKHR>{}(static_cast<VkSurfaceKHR>(surfaceKHR));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainKHR const& swapchainKHR) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSwapchainKHR>{}(static_cast<VkSwapchainKHR>(swapchainKHR));
+    }
+  };
+
+  template <> struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+  {
+    std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationCacheEXT const& validationCacheEXT) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkValidationCacheEXT>{}(static_cast<VkValidationCacheEXT>(validationCacheEXT));
+    }
+  };
+}
 #endif

--
Gitblit v1.9.3
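
For context, and purely as an illustration (this is not something the patch itself adds): the std::hash specializations appended at the end of the updated header let the Vulkan-Hpp handle wrappers be used directly as keys in unordered containers, and the loader aliases KHR entry points into the core names (e.g. vkCreateRenderPass2 falling back to vkCreateRenderPass2KHR) when the core function is unavailable. A minimal sketch of the hashing side, assuming the default `vk` namespace and that the header is reachable as <vulkan/vulkan.hpp>; the cache name and helper below are hypothetical:

// Hypothetical usage of the new std::hash<vk::ImageView> specialization.
#include <cstdint>
#include <unordered_map>
#include <vulkan/vulkan.hpp>

// Maps an image view to a slot in some descriptor array (made-up example).
static std::unordered_map<vk::ImageView, uint32_t> descriptorIndexCache;

uint32_t lookupDescriptorIndex( vk::ImageView view )
{
    // find() hashes the handle via the std::hash<vk::ImageView> specialization
    // introduced by this header update and compares handles with operator==.
    auto it = descriptorIndexCache.find( view );
    return ( it != descriptorIndexCache.end() ) ? it->second : UINT32_MAX;
}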